├── pythonlib
├── __init__.py
├── amlrealtimeai
│ ├── external
│ │ ├── __init__.py
│ │ ├── tensorflow
│ │ │ ├── core
│ │ │ │ ├── __init__.py
│ │ │ │ ├── framework
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── iterator_pb2.py
│ │ │ │ │ ├── versions_pb2.py
│ │ │ │ │ ├── reader_base_pb2.py
│ │ │ │ │ ├── resource_handle_pb2.py
│ │ │ │ │ ├── tensor_description_pb2.py
│ │ │ │ │ ├── tensor_shape_pb2.py
│ │ │ │ │ ├── allocation_description_pb2.py
│ │ │ │ │ ├── graph_pb2.py
│ │ │ │ │ ├── tensor_slice_pb2.py
│ │ │ │ │ ├── device_attributes_pb2.py
│ │ │ │ │ ├── node_def_pb2.py
│ │ │ │ │ ├── kernel_def_pb2.py
│ │ │ │ │ └── variable_pb2.py
│ │ │ │ ├── protobuf
│ │ │ │ │ ├── master_service_pb2.py
│ │ │ │ │ ├── worker_service_pb2.py
│ │ │ │ │ ├── saved_model_pb2.py
│ │ │ │ │ ├── named_tensor_pb2.py
│ │ │ │ │ ├── critical_section_pb2.py
│ │ │ │ │ ├── queue_runner_pb2.py
│ │ │ │ │ ├── tensorflow_server_pb2.py
│ │ │ │ │ ├── saver_pb2.py
│ │ │ │ │ └── cluster_pb2.py
│ │ │ │ ├── example
│ │ │ │ │ └── example_pb2.py
│ │ │ │ └── lib
│ │ │ │   └── core
│ │ │ │     └── error_codes_pb2.py
│ │ │ └── __init__.py
│ │ └── tensorflow_serving
│ │   ├── __init__.py
│ │   └── apis
│ │     ├── __init__.py
│ │     ├── prediction_service_pb2.py
│ │     ├── model_pb2.py
│ │     └── prediction_service_pb2_grpc.py
│ ├── __init__.py
│ ├── ScoreImages.py
│ └── client.py
├── requirements.txt
├── setup.py
└── tests
  └── unit_tests
    └── test_consumption_client.py
├── notebooks
├── snowleopardgaze.jpg
└── readme.md
├── docs
├── media
│ └── aml-workspace-quota-request-info.png
├── README.md
├── containerization-preview-terms-of-use.md
├── terms-of-use.md
└── SSL-and-auth.md
├── pr.pipeline.yml
├── environment.yml
├── sample-clients
├── csharp
│ ├── README.md
│ ├── client
│ │ ├── IScoringRequest.cs
│ │ ├── CSharpClient.csproj
│ │ ├── IPredictionServiceClient.cs
│ │ ├── FloatRequest.cs
│ │ ├── ImageRequest.cs
│ │ ├── TensorExtensions.cs
│ │ └── ScoringClient.cs
│ ├── resnet
│ │ ├── resnet.csproj
│ │ └── Program.cs
│ ├── protos
│ │ ├── tfserving.csproj
│ │ ├── tensorflow
│ │ │ ├── MasterService.cs
│ │ │ ├── WorkerService.cs
│ │ │ ├── Types.cs
│ │ │ └── NamedTensor.cs
│ │ └── tensorflow-serving
│ │   └── PredictionService.cs
│ ├── client.tests
│ │ ├── CSharpClient.Tests.csproj
│ │ └── TensorProtoConvert.cs
│ └── Example.sln
└── README.md
├── run_integration_tests.sh
├── run_unit_tests.sh
├── .github
└── ISSUE_TEMPLATE
  └── bug_report.md
├── LICENSE
├── SECURITY.md
├── README.md
└── .gitignore
/pythonlib/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow_serving/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pythonlib/requirements.txt:
--------------------------------------------------------------------------------
1 | tensorflow >= 1.3
2 | grpcio >= 1.0
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow_serving/apis/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/__init__.py:
--------------------------------------------------------------------------------
1 | from .client import PredictionClient
--------------------------------------------------------------------------------
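The package's public surface is the `PredictionClient` re-exported above. A minimal usage sketch, based on the `ScoreImages.py` script later in this listing (the host name and image path are placeholders, and the positional arguments follow the order that script uses: host, port, use-SSL flag, auth key):

```python
from amlrealtimeai import PredictionClient

# Placeholder endpoint; substitute the address of your deployed service.
client = PredictionClient("my-service.eastus2.cloudapp.azure.com", 80, False, "")

# score_image is assumed to return the model's output for the given image file,
# as used by ScoreImages.py below.
result = client.score_image("snowleopardgaze.jpg")
```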
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/notebooks/snowleopardgaze.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure/aml-real-time-ai/HEAD/notebooks/snowleopardgaze.jpg
--------------------------------------------------------------------------------
/docs/media/aml-workspace-quota-request-info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure/aml-real-time-ai/HEAD/docs/media/aml-workspace-quota-request-info.png
--------------------------------------------------------------------------------
/pr.pipeline.yml:
--------------------------------------------------------------------------------
1 | pool:
2 |   vmImage: 'Ubuntu-16.04'
3 |
4 | steps:
5 | - task: ShellScript@2
6 |   displayName: 'Run unit tests'
7 |   inputs:
8 |     scriptPath: 'run_unit_tests.sh'
9 |
--------------------------------------------------------------------------------
/notebooks/readme.md:
--------------------------------------------------------------------------------
1 | # Sample Notebooks
2 |
3 | These are sample notebooks you can run to train your model and deploy it to FPGAs. More information about Azure ML can be found in the [Azure MachineLearningNotebooks](https://github.com/Azure/MachineLearningNotebooks) repo.
4 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/__init__.py:
--------------------------------------------------------------------------------
1 | # This is a self-contained package of the .proto files from TensorFlow.
2 | # We default to using the TensorFlow installation on the system, but provide this copy so users
3 | # don't need the whole package.
--------------------------------------------------------------------------------
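The comment above describes preferring the protos from an installed TensorFlow and falling back to this bundled copy. A minimal sketch of that pattern, for illustration only (the actual selection logic lives in the package's client code, which is not reproduced in this listing):

```python
try:
    # Prefer the protos that ship with a system-wide TensorFlow installation.
    from tensorflow.core.framework import tensor_shape_pb2
except ImportError:
    # Fall back to the copies bundled under amlrealtimeai.external.
    from amlrealtimeai.external.tensorflow.core.framework import tensor_shape_pb2
```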
/environment.yml:
--------------------------------------------------------------------------------
1 | name: amlrealtimeai
2 | channels:
3 |   - defaults
4 | dependencies:
5 |   - jupyter=1.0.0
6 |   - matplotlib=2.2.2
7 |   - python=3.6
8 |   - scikit-learn=0.19.1
9 |   - tqdm=4.19.5
10 |   - tensorflow=1.10.0
11 |   - pip:
12 |     - --editable ./pythonlib
--------------------------------------------------------------------------------
/sample-clients/csharp/README.md:
--------------------------------------------------------------------------------
1 | This folder contains an example client for consuming the gRPC service in C#, as well as an example command-line application.
2 | It contains code Copyright Microsoft Corporation and licensed under the MIT License, as well as code
3 | Copyright the TensorFlow authors and licensed under the Apache 2.0 License.
--------------------------------------------------------------------------------
/sample-clients/csharp/client/IScoringRequest.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 | using Tensorflow.Serving;
4 |
5 | namespace CSharpClient
6 | {
7 | public interface IScoringRequest
8 | {
9 | PredictRequest MakePredictRequest();
10 | }
11 | }
--------------------------------------------------------------------------------
/run_integration_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | conda env create -f environment.yml
4 | source /etc/conda/bin/activate amlrealtimeai
5 | conda install -y pytest
6 | export TEST_SERVICE_PRINCIPAL_KEY=$1
7 | pytest pythonlib/tests/integration_tests
8 | ERR=$?
9 | source /etc/conda/bin/activate base
10 | conda env remove -y -n amlrealtimeai
11 | exit $ERR
12 |
--------------------------------------------------------------------------------
/sample-clients/csharp/resnet/resnet.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/run_unit_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | dotnet test sample-clients/csharp/client.tests
4 | ERR=$?
5 | if [ $ERR -ne 0 ]
6 | then
7 | exit $ERR
8 | fi
9 | source /usr/share/miniconda/etc/profile.d/conda.sh
10 |
11 | conda env create -f environment.yml
12 | conda activate amlrealtimeai
13 | conda install -y pytest pytest-cov
14 | python -m pytest --cov=pythonlib/amlrealtimeai pythonlib/tests/unit_tests
15 | ERR=$?
16 | conda activate base
17 | conda env remove -y -n amlrealtimeai
18 | if [ $ERR -ne 0 ]
19 | then
20 | exit $ERR
21 | fi
22 |
--------------------------------------------------------------------------------
/sample-clients/csharp/protos/tfserving.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netcoreapp2.0
5 |
6 | false
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/sample-clients/csharp/client/CSharpClient.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netcoreapp2.0
5 | false
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # Docs
2 |
3 | ## Set Up Environment
4 | Follow the instructions in the 00.configuration.ipynb notebook in the [Azure ML MachineLearningNotebooks](https://aka.ms/aml-notebooks) repo.
5 |
6 | **IMPORTANT:** For location, you MUST choose **East US 2** as the region.
7 |
8 | You will create an Azure Machine Learning Workspace, which is required to deploy hardware-accelerated models. If you have an existing workspace in the Azure **East US 2** region, you may skip this step.
9 |
10 | ## Get Workspace Information
11 | When the quota request form asks for information about your workspace, you will need these items:
12 | 1. Subscription ID (GUID)
13 | 1. Resource group name
14 | 1. Workspace name
15 |
16 | 
17 |
18 |
--------------------------------------------------------------------------------
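For reference, the workspace creation that the 00.configuration.ipynb notebook walks through can also be done directly with the Azure ML SDK. A minimal sketch, assuming the `azureml-sdk` package is installed; the subscription ID, resource group, and workspace name below are placeholders, and the region must be East US 2 as noted above:

```python
from azureml.core import Workspace

ws = Workspace.create(name="myworkspace",                   # placeholder
                      subscription_id="<subscription-id>",  # placeholder
                      resource_group="myresourcegroup",     # placeholder
                      location="eastus2",                   # must be East US 2
                      exist_ok=True)
ws.write_config()  # saves config.json so the notebooks can load this workspace later
```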
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 |
5 | ---
6 |
7 | **Describe the bug**
8 | A clear and concise description of what the bug is.
9 |
10 | **To Reproduce**
11 | Steps to reproduce the behavior:
12 | 1. Go to '...'
13 | 2. Click on '....'
14 | 3. Scroll down to '....'
15 | 4. See error
16 |
17 | **Expected behavior**
18 | A clear and concise description of what you expected to happen.
19 |
20 | **Screenshots**
21 | If applicable, add screenshots to help explain your problem.
22 |
23 | **Desktop (please complete the following information):**
24 | - OS: [e.g. Windows, Ubuntu]
25 | - Language [e.g. Python, C#]
26 | - Version (package version and language version)
27 |
28 | **Additional context**
29 | Add any other context about the problem here.
30 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/ScoreImages.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | import argparse
4 | from amlrealtimeai.client import PredictionClient
5 |
6 | parser = argparse.ArgumentParser(description='Score some images')
7 | parser.add_argument('host', type=str,
8 | help='Host to score against')
9 | parser.add_argument('images', nargs='+', type=str,
10 | help='Path of images to score')
11 | parser.add_argument('--ssl', action='store_true', help='Use SSL to score')
12 | parser.add_argument('--key', type=str, default='', help='Auth key to use to score - only works with SSL')
13 | args = parser.parse_args()
14 |
15 |
16 | port = 443 if args.ssl else 80
17 | client = PredictionClient(args.host, port, args.ssl, args.key)
18 | for path in args.images:
19 |     client.score_image(path)
20 |
--------------------------------------------------------------------------------
/sample-clients/csharp/client/IPredictionServiceClient.cs:
--------------------------------------------------------------------------------
1 | using System.Threading.Tasks;
2 | using Tensorflow.Serving;
3 |
4 | namespace CSharpClient
5 | {
6 | public interface IPredictionServiceClient
7 | {
8 | Task<PredictResponse> PredictAsync(PredictRequest predictRequest);
9 | }
10 |
11 | public class PredictionServiceClientWrapper : IPredictionServiceClient
12 | {
13 | private readonly PredictionService.PredictionServiceClient _predictionServiceClient;
14 |
15 | public PredictionServiceClientWrapper(PredictionService.PredictionServiceClient predictionServiceClient)
16 | {
17 | _predictionServiceClient = predictionServiceClient;
18 | }
19 |
20 | public Task<PredictResponse> PredictAsync(PredictRequest predictRequest) => _predictionServiceClient.PredictAsync(predictRequest).ResponseAsync;
21 | }
22 | }
--------------------------------------------------------------------------------
/sample-clients/csharp/client.tests/CSharpClient.Tests.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netcoreapp2.0
5 |
6 | false
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | all
15 | runtime; build; native; contentfiles; analyzers
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation. All rights reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/sample-clients/csharp/client.tests/TensorProtoConvert.cs:
--------------------------------------------------------------------------------
1 | using System.Linq;
2 | using Tensorflow;
3 | using Xunit;
4 |
5 | namespace CSharpClient.Tests
6 | {
7 | public class TensorProtoConvert
8 | {
9 | [Fact]
10 | public void Converts_tensor_proto_to_float_array()
11 | {
12 | var tensorProto = new TensorProto { Dtype = DataType.DtFloat };
13 | tensorProto.FloatVal.Add(Enumerable.Range(1, 300).Select(x => (float)x));
14 |
15 | tensorProto.TensorShape = new TensorShapeProto();
16 | tensorProto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim());
17 | tensorProto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim());
18 | tensorProto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim());
19 |
20 | tensorProto.TensorShape.Dim[0].Size = 10;
21 | tensorProto.TensorShape.Dim[1].Size = 10;
22 | tensorProto.TensorShape.Dim[2].Size = 3;
23 |
24 | var floats = tensorProto.Convert<float[,,]>();
25 | var value = 1;
26 |
27 | for (var i1 = 0; i1 < floats.GetLength(0); i1++)
28 | {
29 | for (var i2 = 0; i2 < floats.GetLength(1); i2++)
30 | {
31 | for (var i3 = 0; i3 < floats.GetLength(2); i3++)
32 | {
33 | Assert.Equal(value++, floats[i1, i2, i3]);
34 | }
35 | }
36 | }
37 | }
38 | }
39 | }
--------------------------------------------------------------------------------
/sample-clients/csharp/client/FloatRequest.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 | using System.IO;
4 | using System.Linq;
5 | using Google.Protobuf;
6 | using Tensorflow;
7 | using Tensorflow.Serving;
8 | using DataType = Tensorflow.DataType;
9 |
10 | namespace CSharpClient
11 | {
12 | using System;
13 | using System.Collections.Generic;
14 | using System.Runtime.CompilerServices;
15 |
16 | using Google.Protobuf.Collections;
17 |
18 | public class FloatRequest : IScoringRequest
19 | {
20 | private readonly PredictRequest _proto;
21 |
22 | public FloatRequest(IDictionary<string, float[]> floats)
23 | : this(
24 | floats.ToDictionary(
25 | kvp => kvp.Key,
26 | kvp => new Tuple<float[], int[]>(kvp.Value, new[] { kvp.Value.Length })))
27 | {
28 | }
29 |
30 | public FloatRequest(IDictionary<string, Tuple<float[], int[]>> inputs)
31 | {
32 | _proto = new PredictRequest { ModelSpec = new ModelSpec() };
33 | foreach (var (key, value) in inputs)
34 | {
35 | _proto.Inputs[key] = makeProto(value);
36 | }
37 | }
38 |
39 | private static TensorProto makeProto(Tuple<float[], int[]> input)
40 | {
41 | var proto = new TensorProto { Dtype = DataType.DtFloat };
42 | proto.FloatVal.AddRange(input.Item1);
43 | var dims = input.Item2.Select(dim => new TensorShapeProto.Types.Dim { Size = dim });
44 | proto.TensorShape = new TensorShapeProto();
45 | proto.TensorShape.Dim.AddRange(dims);
46 | return proto;
47 | }
48 |
49 | public PredictRequest MakePredictRequest()
50 | {
51 | return this._proto;
52 | }
53 | }
54 | }
--------------------------------------------------------------------------------
/docs/containerization-preview-terms-of-use.md:
--------------------------------------------------------------------------------
1 | # Containerization Preview Terms of Use
2 |
3 | These terms of use apply only to the containerization preview.
4 |
5 | This preview is made available to you on the condition that you agree to the [Supplemental Terms of Use for Microsoft Azure Previews](https://azure.microsoft.com/en-us/support/legal/preview-supplemental-terms/) which supplement [your agreement](https://azure.microsoft.com/en-us/support/legal/) governing use of Azure.
6 |
7 | The preview, including its user interface, features and documentation is confidential and proprietary to Microsoft and its suppliers. For five (5) years after access of this service or its commercial release, whichever is first, you may not disclose confidential information to third parties. You may disclose confidential information only to your employees and consultants who need to know the information. You must have written agreements with them that protect the confidential information at least as much as these terms. Your duty to protect confidential information survives these terms.
8 |
9 | You may disclose confidential information in response to a judicial or governmental order. You must first give written notice to Microsoft to allow it to seek a protective order or otherwise protect the information. Confidential information does not include information that (i) becomes publicly known through no wrongful act; (ii) you received from a third party who did not breach confidentiality obligations to Microsoft or its suppliers; or (iii) you developed independently.
10 |
11 | If you give feedback about the preview to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because Microsoft includes your feedback in them. These rights survive these terms.
12 |
--------------------------------------------------------------------------------
/pythonlib/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 | # -*- encoding: utf-8 -*-
5 | from __future__ import absolute_import
6 | from __future__ import print_function
7 |
8 | import io
9 | import re
10 | from glob import glob
11 | from os.path import basename
12 | from os.path import dirname
13 | from os.path import join
14 | from os.path import splitext
15 |
16 | from setuptools import find_packages
17 | from setuptools import setup
18 |
19 |
20 | def read(*names, **kwargs):
21 |     return io.open(
22 |         join(dirname(__file__), *names),
23 |         encoding=kwargs.get('encoding', 'utf8')
24 |     ).read()
25 |
26 |
27 | setup(
28 | name='aml-real-time-ai',
29 | version='0.0.1',
30 | license='MIT',
31 | description='AML Real-Time AI SDK',
32 | long_description='',
33 | author='Microsoft',
34 | author_email='',
35 | url='https://aka.ms/aml-real-time-ai',
36 | # packages=find_packages('src'),
37 | # package_dir={'': 'src'},
38 | include_package_data=True,
39 | zip_safe=False,
40 | classifiers=[
41 | 'Development Status :: 3 - Alpha',
42 | 'Intended Audience :: Developers',
43 | 'License :: OSI Approved :: MIT License',
44 | 'Operating System :: Unix',
45 | 'Operating System :: POSIX',
46 | 'Operating System :: Microsoft :: Windows',
47 | 'Programming Language :: Python',
48 | 'Programming Language :: Python :: 3.6',
49 | ],
50 | keywords=[
51 | # eg: 'keyword1', 'keyword2', 'keyword3',
52 | ],
53 | install_requires=[ "grpcio"
54 | ],
55 | extras_require={
56 | # eg:
57 | # 'rst': ['docutils>=0.11'],
58 | # ':python_version=="2.6"': ['argparse'],
59 | },
60 | entry_points={
61 |     # the 'nameless = nameless.cli:main' console script from the project template
62 |     # has been removed; there is no 'nameless' module in this package
63 |     'console_scripts': [],
64 | },
65 | )
66 |
--------------------------------------------------------------------------------
/sample-clients/csharp/client/ImageRequest.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 | using System.IO;
4 | using System.Linq;
5 | using Google.Protobuf;
6 | using Tensorflow;
7 | using Tensorflow.Serving;
8 | using DataType = Tensorflow.DataType;
9 |
10 | namespace CSharpClient
11 | {
12 | public class ImageRequest : IScoringRequest
13 | {
14 | private readonly ModelSpec _modelSpec;
15 | private readonly TensorProto _proto;
16 |
17 | private readonly string _inputName;
18 |
19 | public ImageRequest(params Stream[] images)
20 | {
21 | _modelSpec = new ModelSpec();
22 | _proto = new TensorProto { Dtype = DataType.DtString };
23 |
24 | var bytes = images.Select(ByteString.FromStream);
25 | _proto.StringVal.AddRange(bytes);
26 | _proto.TensorShape = new TensorShapeProto();
27 | _proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim());
28 | _proto.TensorShape.Dim[0].Size = images.Length;
29 | _inputName = "images";
30 | }
31 |
32 | public ImageRequest(string inputName, params Stream[] images)
33 | {
34 | _modelSpec = new ModelSpec();
35 | _proto = new TensorProto { Dtype = DataType.DtString };
36 |
37 | var bytes = images.Select(ByteString.FromStream);
38 | _proto.StringVal.AddRange(bytes);
39 | _proto.TensorShape = new TensorShapeProto();
40 | _proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim());
41 | _proto.TensorShape.Dim[0].Size = images.Length;
42 | _inputName = inputName;
43 | }
44 |
45 | public PredictRequest MakePredictRequest()
46 | {
47 | var request = new PredictRequest { ModelSpec = _modelSpec };
48 |
49 | request.Inputs[this._inputName] = _proto;
50 | return request;
51 | }
52 | }
53 | }
--------------------------------------------------------------------------------
/docs/terms-of-use.md:
--------------------------------------------------------------------------------
1 | # Terms of Use
2 |
3 | **BY USING THIS PREVIEW SERVICE, PARTICIPANT ACCEPTS THESE TERMS. IF PARTICIPANT DOES NOT ACCEPT THESE TERMS, DO NOT USE THE PREVIEW SERVICE.**
4 |
5 | Your use of Azure ML Hardware Accelerated Service is governed by the [Microsoft Online Subscription Agreement](https://azure.microsoft.com/en-us/support/legal/subscription-agreement/) (which incorporates the [Online Services Terms](http://www.microsoftvolumelicensing.com/DocumentSearch.aspx?Mode=3&DocumentTypeId=31)) and the terms set out on this page. Azure ML Hardware Accelerated Service are private previews offered by Microsoft to obtain customer feedback ("Previews").
6 | You are responsible for obtaining all rights and permissions applicable to all Customer Data that you upload to Microsoft for use with Azure ML Hardware Accelerated Service. These may include but are not limited to:
7 | * copyrights in the content
8 | * copyrights and trademarks applicable to any materials contained or depicted in, or required for your distribution or performance of such content
9 | * any intellectual property rights (including rights of publicity and any rights attaching to goodwill or reputation) applicable to any people or any fictional characters whose names, voices, images, signatures, or unique characteristics are used as part of any content
10 | * compliance with terms of any license or agreement for such content, including obligations arising under any applicable collective bargaining or guild agreements.
11 |
12 | PREVIEWS ARE PROVIDED "AS-IS," "WITH ALL FAULTS," AND "AS AVAILABLE," AND ARE EXCLUDED FROM THE SERVICE LEVEL AGREEMENTS AND LIMITED WARRANTY. Previews are not covered by customer support. Previews employ lesser or different privacy and security measures than those typically present in the Online Services. You should not use Previews to process Personal Data or other data that is subject to heightened compliance requirements. We may change or discontinue Previews at any time without notice. We may choose not to release a Preview into "General Availability."
13 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/master_service_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/master_service.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..protobuf import master_pb2 as tensorflow_dot_core_dot_protobuf_dot_master__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/protobuf/master_service.proto',
21 | package='tensorflow.grpc',
22 | syntax='proto3',
23 | serialized_pb=_b('\n-tensorflow/core/protobuf/master_service.proto\x12\x0ftensorflow.grpc\x1a%tensorflow/core/protobuf/master.proto2\xbc\x04\n\rMasterService\x12T\n\rCreateSession\x12 .tensorflow.CreateSessionRequest\x1a!.tensorflow.CreateSessionResponse\x12T\n\rExtendSession\x12 .tensorflow.ExtendSessionRequest\x1a!.tensorflow.ExtendSessionResponse\x12Z\n\x0fPartialRunSetup\x12\".tensorflow.PartialRunSetupRequest\x1a#.tensorflow.PartialRunSetupResponse\x12\x42\n\x07RunStep\x12\x1a.tensorflow.RunStepRequest\x1a\x1b.tensorflow.RunStepResponse\x12Q\n\x0c\x43loseSession\x12\x1f.tensorflow.CloseSessionRequest\x1a .tensorflow.CloseSessionResponse\x12N\n\x0bListDevices\x12\x1e.tensorflow.ListDevicesRequest\x1a\x1f.tensorflow.ListDevicesResponse\x12<\n\x05Reset\x12\x18.tensorflow.ResetRequest\x1a\x19.tensorflow.ResetResponseB3\n\x1aorg.tensorflow.distruntimeB\x13MasterServiceProtosP\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_protobuf_dot_master__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 |
32 | DESCRIPTOR.has_options = True
33 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032org.tensorflow.distruntimeB\023MasterServiceProtosP\001'))
34 | # @@protoc_insertion_point(module_scope)
35 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/worker_service_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/worker_service.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..protobuf import worker_pb2 as tensorflow_dot_core_dot_protobuf_dot_worker__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/protobuf/worker_service.proto',
21 | package='tensorflow.grpc',
22 | syntax='proto3',
23 | serialized_pb=_b('\n-tensorflow/core/protobuf/worker_service.proto\x12\x0ftensorflow.grpc\x1a%tensorflow/core/protobuf/worker.proto2\x99\x07\n\rWorkerService\x12H\n\tGetStatus\x12\x1c.tensorflow.GetStatusRequest\x1a\x1d.tensorflow.GetStatusResponse\x12\x66\n\x13\x43reateWorkerSession\x12&.tensorflow.CreateWorkerSessionRequest\x1a\'.tensorflow.CreateWorkerSessionResponse\x12\x66\n\x13\x44\x65leteWorkerSession\x12&.tensorflow.DeleteWorkerSessionRequest\x1a\'.tensorflow.DeleteWorkerSessionResponse\x12T\n\rRegisterGraph\x12 .tensorflow.RegisterGraphRequest\x1a!.tensorflow.RegisterGraphResponse\x12Z\n\x0f\x44\x65registerGraph\x12\".tensorflow.DeregisterGraphRequest\x1a#.tensorflow.DeregisterGraphResponse\x12\x45\n\x08RunGraph\x12\x1b.tensorflow.RunGraphRequest\x1a\x1c.tensorflow.RunGraphResponse\x12Q\n\x0c\x43leanupGraph\x12\x1f.tensorflow.CleanupGraphRequest\x1a .tensorflow.CleanupGraphResponse\x12K\n\nCleanupAll\x12\x1d.tensorflow.CleanupAllRequest\x1a\x1e.tensorflow.CleanupAllResponse\x12M\n\nRecvTensor\x12\x1d.tensorflow.RecvTensorRequest\x1a\x1e.tensorflow.RecvTensorResponse\"\x00\x12\x42\n\x07Logging\x12\x1a.tensorflow.LoggingRequest\x1a\x1b.tensorflow.LoggingResponse\x12\x42\n\x07Tracing\x12\x1a.tensorflow.TracingRequest\x1a\x1b.tensorflow.TracingResponseB3\n\x1aorg.tensorflow.distruntimeB\x13WorkerServiceProtosP\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_protobuf_dot_worker__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 |
32 | DESCRIPTOR.has_options = True
33 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032org.tensorflow.distruntimeB\023WorkerServiceProtosP\001'))
34 | # @@protoc_insertion_point(module_scope)
35 |
--------------------------------------------------------------------------------
/sample-clients/csharp/protos/tensorflow/MasterService.cs:
--------------------------------------------------------------------------------
1 | // Generated by the protocol buffer compiler. DO NOT EDIT!
2 | // source: tensorflow/core/protobuf/master_service.proto
3 | #pragma warning disable 1591, 0612, 3021
4 | #region Designer generated code
5 |
6 | using pb = global::Google.Protobuf;
7 | using pbc = global::Google.Protobuf.Collections;
8 | using pbr = global::Google.Protobuf.Reflection;
9 | using scg = global::System.Collections.Generic;
10 | namespace Tensorflow.Grpc {
11 |
12 | /// <summary>Holder for reflection information generated from tensorflow/core/protobuf/master_service.proto</summary>
13 | public static partial class MasterServiceReflection {
14 |
15 | #region Descriptor
16 | /// <summary>File descriptor for tensorflow/core/protobuf/master_service.proto</summary>
17 | public static pbr::FileDescriptor Descriptor {
18 | get { return descriptor; }
19 | }
20 | private static pbr::FileDescriptor descriptor;
21 |
22 | static MasterServiceReflection() {
23 | byte[] descriptorData = global::System.Convert.FromBase64String(
24 | string.Concat(
25 | "Ci10ZW5zb3JmbG93L2NvcmUvcHJvdG9idWYvbWFzdGVyX3NlcnZpY2UucHJv",
26 | "dG8SD3RlbnNvcmZsb3cuZ3JwYxoldGVuc29yZmxvdy9jb3JlL3Byb3RvYnVm",
27 | "L21hc3Rlci5wcm90bzK8BAoNTWFzdGVyU2VydmljZRJUCg1DcmVhdGVTZXNz",
28 | "aW9uEiAudGVuc29yZmxvdy5DcmVhdGVTZXNzaW9uUmVxdWVzdBohLnRlbnNv",
29 | "cmZsb3cuQ3JlYXRlU2Vzc2lvblJlc3BvbnNlElQKDUV4dGVuZFNlc3Npb24S",
30 | "IC50ZW5zb3JmbG93LkV4dGVuZFNlc3Npb25SZXF1ZXN0GiEudGVuc29yZmxv",
31 | "dy5FeHRlbmRTZXNzaW9uUmVzcG9uc2USWgoPUGFydGlhbFJ1blNldHVwEiIu",
32 | "dGVuc29yZmxvdy5QYXJ0aWFsUnVuU2V0dXBSZXF1ZXN0GiMudGVuc29yZmxv",
33 | "dy5QYXJ0aWFsUnVuU2V0dXBSZXNwb25zZRJCCgdSdW5TdGVwEhoudGVuc29y",
34 | "Zmxvdy5SdW5TdGVwUmVxdWVzdBobLnRlbnNvcmZsb3cuUnVuU3RlcFJlc3Bv",
35 | "bnNlElEKDENsb3NlU2Vzc2lvbhIfLnRlbnNvcmZsb3cuQ2xvc2VTZXNzaW9u",
36 | "UmVxdWVzdBogLnRlbnNvcmZsb3cuQ2xvc2VTZXNzaW9uUmVzcG9uc2USTgoL",
37 | "TGlzdERldmljZXMSHi50ZW5zb3JmbG93Lkxpc3REZXZpY2VzUmVxdWVzdBof",
38 | "LnRlbnNvcmZsb3cuTGlzdERldmljZXNSZXNwb25zZRI8CgVSZXNldBIYLnRl",
39 | "bnNvcmZsb3cuUmVzZXRSZXF1ZXN0GhkudGVuc29yZmxvdy5SZXNldFJlc3Bv",
40 | "bnNlQjMKGm9yZy50ZW5zb3JmbG93LmRpc3RydW50aW1lQhNNYXN0ZXJTZXJ2",
41 | "aWNlUHJvdG9zUAFiBnByb3RvMw=="));
42 | descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
43 | new pbr::FileDescriptor[] { global::Tensorflow.MasterReflection.Descriptor, },
44 | new pbr::GeneratedClrTypeInfo(null, null));
45 | }
46 | #endregion
47 |
48 | }
49 | }
50 |
51 | #endregion Designer generated code
52 |
--------------------------------------------------------------------------------
/sample-clients/csharp/client/TensorExtensions.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 | using System;
4 | using System.Collections.Generic;
5 | using System.Linq;
6 | using Tensorflow;
7 |
8 | namespace CSharpClient
9 | {
10 | public static class TensorExtensions
11 | {
12 | public static T Convert<T>(this TensorProto tensor) where T : class
13 | {
14 | var shape = tensor.TensorShape;
15 | var dimCount = shape.Dim.Count;
16 |
17 | var resultType = typeof(T);
18 |
19 | if (!resultType.IsArray)
20 | {
21 | throw new Exception("Unable to convert tensor into scalar type");
22 | }
23 |
24 | var arrayRank = typeof(T).GetArrayRank();
25 |
26 | if (arrayRank != dimCount)
27 | {
28 | throw new Exception($"result tensor was not the expected rank {arrayRank} - was rank {dimCount}");
29 | }
30 |
31 | var elementType = resultType.GetElementType();
32 |
33 | Func<TensorProto, int, object> getItemFunc = null;
34 |
35 | if (elementType == typeof(float))
36 | {
37 | getItemFunc = (t, i) => t.FloatVal[i];
38 | }
39 |
40 | if (getItemFunc == null)
41 | {
42 | throw new Exception($"Don't know how to handle type {elementType}");
43 | }
44 |
45 | var dimSizes = shape.Dim.Select(d => (int)d.Size).ToArray();
46 | var sysArray = Array.CreateInstance(elementType, dimSizes);
47 | var tensorIndex = 0;
48 |
49 | foreach (var dimArray in GetPermutations(dimSizes))
50 | {
51 | sysArray.SetValue(getItemFunc(tensor, tensorIndex), dimArray);
52 | tensorIndex++;
53 | }
54 |
55 | return sysArray as T;
56 | }
57 |
58 | public static IEnumerable<int[]> GetPermutations(this int[] maxValues)
59 | {
60 | return GetPermutations(new int[maxValues.Length], 0, maxValues);
61 | }
62 |
63 | private static IEnumerable<int[]> GetPermutations(int[] values, int index, int[] maxValues)
64 | {
65 | if (index >= values.Length)
66 | {
67 | return new[] { values };
68 | }
69 |
70 | var result = new List<int[]>();
71 |
72 | for (var i = 0; i < maxValues[index]; i++)
73 | {
74 | var currentValues = values.ToArray();
75 | currentValues[index] = i;
76 | result.AddRange(GetPermutations(currentValues, index + 1, maxValues));
77 | }
78 |
79 | return result;
80 | }
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow_serving/apis/prediction_service_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow_serving/apis/prediction_service.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..apis import classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2
17 | from ..apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
18 | from ..apis import inference_pb2 as tensorflow__serving_dot_apis_dot_inference__pb2
19 | from ..apis import predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2
20 | from ..apis import regression_pb2 as tensorflow__serving_dot_apis_dot_regression__pb2
21 |
22 |
23 | DESCRIPTOR = _descriptor.FileDescriptor(
24 | name='tensorflow_serving/apis/prediction_service.proto',
25 | package='tensorflow.serving',
26 | syntax='proto3',
27 | serialized_pb=_b('\n0tensorflow_serving/apis/prediction_service.proto\x12\x12tensorflow.serving\x1a,tensorflow_serving/apis/classification.proto\x1a\x30tensorflow_serving/apis/get_model_metadata.proto\x1a\'tensorflow_serving/apis/inference.proto\x1a%tensorflow_serving/apis/predict.proto\x1a(tensorflow_serving/apis/regression.proto2\xfc\x03\n\x11PredictionService\x12\x61\n\x08\x43lassify\x12).tensorflow.serving.ClassificationRequest\x1a*.tensorflow.serving.ClassificationResponse\x12X\n\x07Regress\x12%.tensorflow.serving.RegressionRequest\x1a&.tensorflow.serving.RegressionResponse\x12R\n\x07Predict\x12\".tensorflow.serving.PredictRequest\x1a#.tensorflow.serving.PredictResponse\x12g\n\x0eMultiInference\x12).tensorflow.serving.MultiInferenceRequest\x1a*.tensorflow.serving.MultiInferenceResponse\x12m\n\x10GetModelMetadata\x12+.tensorflow.serving.GetModelMetadataRequest\x1a,.tensorflow.serving.GetModelMetadataResponseB\x03\xf8\x01\x01\x62\x06proto3')
28 | ,
29 | dependencies=[tensorflow__serving_dot_apis_dot_classification__pb2.DESCRIPTOR,tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.DESCRIPTOR,tensorflow__serving_dot_apis_dot_inference__pb2.DESCRIPTOR,tensorflow__serving_dot_apis_dot_predict__pb2.DESCRIPTOR,tensorflow__serving_dot_apis_dot_regression__pb2.DESCRIPTOR,])
30 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
31 |
32 |
33 |
34 |
35 |
36 | DESCRIPTOR.has_options = True
37 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
38 | # @@protoc_insertion_point(module_scope)
39 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Security
4 |
5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
6 |
7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
8 |
9 | ## Reporting Security Issues
10 |
11 | **Please do not report security vulnerabilities through public GitHub issues.**
12 |
13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
14 |
15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey).
16 |
17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc).
18 |
19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
20 |
21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
22 | * Full paths of source file(s) related to the manifestation of the issue
23 | * The location of the affected source code (tag/branch/commit or direct URL)
24 | * Any special configuration required to reproduce the issue
25 | * Step-by-step instructions to reproduce the issue
26 | * Proof-of-concept or exploit code (if possible)
27 | * Impact of the issue, including how an attacker might exploit the issue
28 |
29 | This information will help us triage your report more quickly.
30 |
31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs.
32 |
33 | ## Preferred Languages
34 |
35 | We prefer all communications to be in English.
36 |
37 | ## Policy
38 |
39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd).
40 |
41 |
42 |
--------------------------------------------------------------------------------
/sample-clients/csharp/Example.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio 15
4 | VisualStudioVersion = 15.0.27130.2027
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "tfserving", "protos\tfserving.csproj", "{DBAE814C-26E8-4CFE-9BAF-CE605CA8B02F}"
7 | EndProject
8 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CSharpClient", "client\CSharpClient.csproj", "{9369A5F6-FE4A-4116-9558-6B8D2A09C7EB}"
9 | EndProject
10 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "resnet", "resnet\resnet.csproj", "{47F112D5-8D41-4E7E-9BF0-E6B8C18FBFB3}"
11 | ProjectSection(ProjectDependencies) = postProject
12 | {DBAE814C-26E8-4CFE-9BAF-CE605CA8B02F} = {DBAE814C-26E8-4CFE-9BAF-CE605CA8B02F}
13 | {9369A5F6-FE4A-4116-9558-6B8D2A09C7EB} = {9369A5F6-FE4A-4116-9558-6B8D2A09C7EB}
14 | EndProjectSection
15 | EndProject
16 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CSharpClient.Tests", "client.tests\CSharpClient.Tests.csproj", "{867D4433-A7BC-482B-ADAE-08DBF7205703}"
17 | EndProject
18 | Global
19 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
20 | Debug|Any CPU = Debug|Any CPU
21 | Release|Any CPU = Release|Any CPU
22 | EndGlobalSection
23 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
24 | {DBAE814C-26E8-4CFE-9BAF-CE605CA8B02F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
25 | {DBAE814C-26E8-4CFE-9BAF-CE605CA8B02F}.Debug|Any CPU.Build.0 = Debug|Any CPU
26 | {DBAE814C-26E8-4CFE-9BAF-CE605CA8B02F}.Release|Any CPU.ActiveCfg = Release|Any CPU
27 | {DBAE814C-26E8-4CFE-9BAF-CE605CA8B02F}.Release|Any CPU.Build.0 = Release|Any CPU
28 | {9369A5F6-FE4A-4116-9558-6B8D2A09C7EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
29 | {9369A5F6-FE4A-4116-9558-6B8D2A09C7EB}.Debug|Any CPU.Build.0 = Debug|Any CPU
30 | {9369A5F6-FE4A-4116-9558-6B8D2A09C7EB}.Release|Any CPU.ActiveCfg = Release|Any CPU
31 | {9369A5F6-FE4A-4116-9558-6B8D2A09C7EB}.Release|Any CPU.Build.0 = Release|Any CPU
32 | {47F112D5-8D41-4E7E-9BF0-E6B8C18FBFB3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
33 | {47F112D5-8D41-4E7E-9BF0-E6B8C18FBFB3}.Debug|Any CPU.Build.0 = Debug|Any CPU
34 | {47F112D5-8D41-4E7E-9BF0-E6B8C18FBFB3}.Release|Any CPU.ActiveCfg = Release|Any CPU
35 | {47F112D5-8D41-4E7E-9BF0-E6B8C18FBFB3}.Release|Any CPU.Build.0 = Release|Any CPU
36 | {867D4433-A7BC-482B-ADAE-08DBF7205703}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
37 | {867D4433-A7BC-482B-ADAE-08DBF7205703}.Debug|Any CPU.Build.0 = Debug|Any CPU
38 | {867D4433-A7BC-482B-ADAE-08DBF7205703}.Release|Any CPU.ActiveCfg = Release|Any CPU
39 | {867D4433-A7BC-482B-ADAE-08DBF7205703}.Release|Any CPU.Build.0 = Release|Any CPU
40 | EndGlobalSection
41 | GlobalSection(SolutionProperties) = preSolution
42 | HideSolutionNode = FALSE
43 | EndGlobalSection
44 | GlobalSection(ExtensibilityGlobals) = postSolution
45 | SolutionGuid = {A2AA42E0-11C1-4C3A-AC4D-4083AEBF7F38}
46 | EndGlobalSection
47 | EndGlobal
48 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/iterator_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/iterator.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/iterator.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n(tensorflow/core/framework/iterator.proto\x12\ntensorflow\"6\n\x15IteratorStateMetadata\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\tB*\n\x13org.tensorflow.utilB\x0eIteratorProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _ITERATORSTATEMETADATA = _descriptor.Descriptor(
30 | name='IteratorStateMetadata',
31 | full_name='tensorflow.IteratorStateMetadata',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='version', full_name='tensorflow.IteratorStateMetadata.version', index=0,
38 | number=1, type=9, cpp_type=9, label=1,
39 | has_default_value=False, default_value=_b("").decode('utf-8'),
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='keys', full_name='tensorflow.IteratorStateMetadata.keys', index=1,
45 | number=2, type=9, cpp_type=9, label=3,
46 | has_default_value=False, default_value=[],
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | ],
51 | extensions=[
52 | ],
53 | nested_types=[],
54 | enum_types=[
55 | ],
56 | options=None,
57 | is_extendable=False,
58 | syntax='proto3',
59 | extension_ranges=[],
60 | oneofs=[
61 | ],
62 | serialized_start=56,
63 | serialized_end=110,
64 | )
65 |
66 | DESCRIPTOR.message_types_by_name['IteratorStateMetadata'] = _ITERATORSTATEMETADATA
67 |
68 | IteratorStateMetadata = _reflection.GeneratedProtocolMessageType('IteratorStateMetadata', (_message.Message,), dict(
69 | DESCRIPTOR = _ITERATORSTATEMETADATA,
70 | __module__ = 'tensorflow.core.framework.iterator_pb2'
71 | # @@protoc_insertion_point(class_scope:tensorflow.IteratorStateMetadata)
72 | ))
73 | _sym_db.RegisterMessage(IteratorStateMetadata)
74 |
75 |
76 | DESCRIPTOR.has_options = True
77 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023org.tensorflow.utilB\016IteratorProtosP\001\370\001\001'))
78 | # @@protoc_insertion_point(module_scope)
79 |
--------------------------------------------------------------------------------
/sample-clients/csharp/protos/tensorflow-serving/PredictionService.cs:
--------------------------------------------------------------------------------
1 | // Generated by the protocol buffer compiler. DO NOT EDIT!
2 | // source: tensorflow_serving/apis/prediction_service.proto
3 | #pragma warning disable 1591, 0612, 3021
4 | #region Designer generated code
5 |
6 | using pb = global::Google.Protobuf;
7 | using pbc = global::Google.Protobuf.Collections;
8 | using pbr = global::Google.Protobuf.Reflection;
9 | using scg = global::System.Collections.Generic;
10 | namespace Tensorflow.Serving {
11 |
12 | /// <summary>Holder for reflection information generated from tensorflow_serving/apis/prediction_service.proto</summary>
13 | public static partial class PredictionServiceReflection {
14 |
15 | #region Descriptor
16 | /// <summary>File descriptor for tensorflow_serving/apis/prediction_service.proto</summary>
17 | public static pbr::FileDescriptor Descriptor {
18 | get { return descriptor; }
19 | }
20 | private static pbr::FileDescriptor descriptor;
21 |
22 | static PredictionServiceReflection() {
23 | byte[] descriptorData = global::System.Convert.FromBase64String(
24 | string.Concat(
25 | "CjB0ZW5zb3JmbG93X3NlcnZpbmcvYXBpcy9wcmVkaWN0aW9uX3NlcnZpY2Uu",
26 | "cHJvdG8SEnRlbnNvcmZsb3cuc2VydmluZxosdGVuc29yZmxvd19zZXJ2aW5n",
27 | "L2FwaXMvY2xhc3NpZmljYXRpb24ucHJvdG8aMHRlbnNvcmZsb3dfc2Vydmlu",
28 | "Zy9hcGlzL2dldF9tb2RlbF9tZXRhZGF0YS5wcm90bxondGVuc29yZmxvd19z",
29 | "ZXJ2aW5nL2FwaXMvaW5mZXJlbmNlLnByb3RvGiV0ZW5zb3JmbG93X3NlcnZp",
30 | "bmcvYXBpcy9wcmVkaWN0LnByb3RvGih0ZW5zb3JmbG93X3NlcnZpbmcvYXBp",
31 | "cy9yZWdyZXNzaW9uLnByb3RvMvwDChFQcmVkaWN0aW9uU2VydmljZRJhCghD",
32 | "bGFzc2lmeRIpLnRlbnNvcmZsb3cuc2VydmluZy5DbGFzc2lmaWNhdGlvblJl",
33 | "cXVlc3QaKi50ZW5zb3JmbG93LnNlcnZpbmcuQ2xhc3NpZmljYXRpb25SZXNw",
34 | "b25zZRJYCgdSZWdyZXNzEiUudGVuc29yZmxvdy5zZXJ2aW5nLlJlZ3Jlc3Np",
35 | "b25SZXF1ZXN0GiYudGVuc29yZmxvdy5zZXJ2aW5nLlJlZ3Jlc3Npb25SZXNw",
36 | "b25zZRJSCgdQcmVkaWN0EiIudGVuc29yZmxvdy5zZXJ2aW5nLlByZWRpY3RS",
37 | "ZXF1ZXN0GiMudGVuc29yZmxvdy5zZXJ2aW5nLlByZWRpY3RSZXNwb25zZRJn",
38 | "Cg5NdWx0aUluZmVyZW5jZRIpLnRlbnNvcmZsb3cuc2VydmluZy5NdWx0aUlu",
39 | "ZmVyZW5jZVJlcXVlc3QaKi50ZW5zb3JmbG93LnNlcnZpbmcuTXVsdGlJbmZl",
40 | "cmVuY2VSZXNwb25zZRJtChBHZXRNb2RlbE1ldGFkYXRhEisudGVuc29yZmxv",
41 | "dy5zZXJ2aW5nLkdldE1vZGVsTWV0YWRhdGFSZXF1ZXN0GiwudGVuc29yZmxv",
42 | "dy5zZXJ2aW5nLkdldE1vZGVsTWV0YWRhdGFSZXNwb25zZUID+AEBYgZwcm90",
43 | "bzM="));
44 | descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
45 | new pbr::FileDescriptor[] { global::Tensorflow.Serving.ClassificationReflection.Descriptor, global::Tensorflow.Serving.GetModelMetadataReflection.Descriptor, global::Tensorflow.Serving.InferenceReflection.Descriptor, global::Tensorflow.Serving.PredictReflection.Descriptor, global::Tensorflow.Serving.RegressionReflection.Descriptor, },
46 | new pbr::GeneratedClrTypeInfo(null, null));
47 | }
48 | #endregion
49 |
50 | }
51 | }
52 |
53 | #endregion Designer generated code
54 |
--------------------------------------------------------------------------------
/sample-clients/csharp/protos/tensorflow/WorkerService.cs:
--------------------------------------------------------------------------------
1 | // Generated by the protocol buffer compiler. DO NOT EDIT!
2 | // source: tensorflow/core/protobuf/worker_service.proto
3 | #pragma warning disable 1591, 0612, 3021
4 | #region Designer generated code
5 |
6 | using pb = global::Google.Protobuf;
7 | using pbc = global::Google.Protobuf.Collections;
8 | using pbr = global::Google.Protobuf.Reflection;
9 | using scg = global::System.Collections.Generic;
10 | namespace Tensorflow.Grpc {
11 |
12 | /// <summary>Holder for reflection information generated from tensorflow/core/protobuf/worker_service.proto</summary>
13 | public static partial class WorkerServiceReflection {
14 |
15 | #region Descriptor
16 | /// <summary>File descriptor for tensorflow/core/protobuf/worker_service.proto</summary>
17 | public static pbr::FileDescriptor Descriptor {
18 | get { return descriptor; }
19 | }
20 | private static pbr::FileDescriptor descriptor;
21 |
22 | static WorkerServiceReflection() {
23 | byte[] descriptorData = global::System.Convert.FromBase64String(
24 | string.Concat(
25 | "Ci10ZW5zb3JmbG93L2NvcmUvcHJvdG9idWYvd29ya2VyX3NlcnZpY2UucHJv",
26 | "dG8SD3RlbnNvcmZsb3cuZ3JwYxoldGVuc29yZmxvdy9jb3JlL3Byb3RvYnVm",
27 | "L3dvcmtlci5wcm90bzKZBwoNV29ya2VyU2VydmljZRJICglHZXRTdGF0dXMS",
28 | "HC50ZW5zb3JmbG93LkdldFN0YXR1c1JlcXVlc3QaHS50ZW5zb3JmbG93Lkdl",
29 | "dFN0YXR1c1Jlc3BvbnNlEmYKE0NyZWF0ZVdvcmtlclNlc3Npb24SJi50ZW5z",
30 | "b3JmbG93LkNyZWF0ZVdvcmtlclNlc3Npb25SZXF1ZXN0GicudGVuc29yZmxv",
31 | "dy5DcmVhdGVXb3JrZXJTZXNzaW9uUmVzcG9uc2USZgoTRGVsZXRlV29ya2Vy",
32 | "U2Vzc2lvbhImLnRlbnNvcmZsb3cuRGVsZXRlV29ya2VyU2Vzc2lvblJlcXVl",
33 | "c3QaJy50ZW5zb3JmbG93LkRlbGV0ZVdvcmtlclNlc3Npb25SZXNwb25zZRJU",
34 | "Cg1SZWdpc3RlckdyYXBoEiAudGVuc29yZmxvdy5SZWdpc3RlckdyYXBoUmVx",
35 | "dWVzdBohLnRlbnNvcmZsb3cuUmVnaXN0ZXJHcmFwaFJlc3BvbnNlEloKD0Rl",
36 | "cmVnaXN0ZXJHcmFwaBIiLnRlbnNvcmZsb3cuRGVyZWdpc3RlckdyYXBoUmVx",
37 | "dWVzdBojLnRlbnNvcmZsb3cuRGVyZWdpc3RlckdyYXBoUmVzcG9uc2USRQoI",
38 | "UnVuR3JhcGgSGy50ZW5zb3JmbG93LlJ1bkdyYXBoUmVxdWVzdBocLnRlbnNv",
39 | "cmZsb3cuUnVuR3JhcGhSZXNwb25zZRJRCgxDbGVhbnVwR3JhcGgSHy50ZW5z",
40 | "b3JmbG93LkNsZWFudXBHcmFwaFJlcXVlc3QaIC50ZW5zb3JmbG93LkNsZWFu",
41 | "dXBHcmFwaFJlc3BvbnNlEksKCkNsZWFudXBBbGwSHS50ZW5zb3JmbG93LkNs",
42 | "ZWFudXBBbGxSZXF1ZXN0Gh4udGVuc29yZmxvdy5DbGVhbnVwQWxsUmVzcG9u",
43 | "c2USTQoKUmVjdlRlbnNvchIdLnRlbnNvcmZsb3cuUmVjdlRlbnNvclJlcXVl",
44 | "c3QaHi50ZW5zb3JmbG93LlJlY3ZUZW5zb3JSZXNwb25zZSIAEkIKB0xvZ2dp",
45 | "bmcSGi50ZW5zb3JmbG93LkxvZ2dpbmdSZXF1ZXN0GhsudGVuc29yZmxvdy5M",
46 | "b2dnaW5nUmVzcG9uc2USQgoHVHJhY2luZxIaLnRlbnNvcmZsb3cuVHJhY2lu",
47 | "Z1JlcXVlc3QaGy50ZW5zb3JmbG93LlRyYWNpbmdSZXNwb25zZUIzChpvcmcu",
48 | "dGVuc29yZmxvdy5kaXN0cnVudGltZUITV29ya2VyU2VydmljZVByb3Rvc1AB",
49 | "YgZwcm90bzM="));
50 | descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
51 | new pbr::FileDescriptor[] { global::Tensorflow.WorkerReflection.Descriptor, },
52 | new pbr::GeneratedClrTypeInfo(null, null));
53 | }
54 | #endregion
55 |
56 | }
57 | }
58 |
59 | #endregion Designer generated code
60 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/versions_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/versions.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/versions.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n(tensorflow/core/framework/versions.proto\x12\ntensorflow\"K\n\nVersionDef\x12\x10\n\x08producer\x18\x01 \x01(\x05\x12\x14\n\x0cmin_consumer\x18\x02 \x01(\x05\x12\x15\n\rbad_consumers\x18\x03 \x03(\x05\x42/\n\x18org.tensorflow.frameworkB\x0eVersionsProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _VERSIONDEF = _descriptor.Descriptor(
30 | name='VersionDef',
31 | full_name='tensorflow.VersionDef',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='producer', full_name='tensorflow.VersionDef.producer', index=0,
38 | number=1, type=5, cpp_type=1, label=1,
39 | has_default_value=False, default_value=0,
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='min_consumer', full_name='tensorflow.VersionDef.min_consumer', index=1,
45 | number=2, type=5, cpp_type=1, label=1,
46 | has_default_value=False, default_value=0,
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | _descriptor.FieldDescriptor(
51 | name='bad_consumers', full_name='tensorflow.VersionDef.bad_consumers', index=2,
52 | number=3, type=5, cpp_type=1, label=3,
53 | has_default_value=False, default_value=[],
54 | message_type=None, enum_type=None, containing_type=None,
55 | is_extension=False, extension_scope=None,
56 | options=None),
57 | ],
58 | extensions=[
59 | ],
60 | nested_types=[],
61 | enum_types=[
62 | ],
63 | options=None,
64 | is_extendable=False,
65 | syntax='proto3',
66 | extension_ranges=[],
67 | oneofs=[
68 | ],
69 | serialized_start=56,
70 | serialized_end=131,
71 | )
72 |
73 | DESCRIPTOR.message_types_by_name['VersionDef'] = _VERSIONDEF
74 |
75 | VersionDef = _reflection.GeneratedProtocolMessageType('VersionDef', (_message.Message,), dict(
76 | DESCRIPTOR = _VERSIONDEF,
77 | __module__ = 'tensorflow.core.framework.versions_pb2'
78 | # @@protoc_insertion_point(class_scope:tensorflow.VersionDef)
79 | ))
80 | _sym_db.RegisterMessage(VersionDef)
81 |
82 |
83 | DESCRIPTOR.has_options = True
84 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\016VersionsProtosP\001\370\001\001'))
85 | # @@protoc_insertion_point(module_scope)
86 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/saved_model_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/saved_model.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..protobuf import meta_graph_pb2 as tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/protobuf/saved_model.proto',
21 | package='tensorflow',
22 | syntax='proto3',
23 | serialized_pb=_b('\n*tensorflow/core/protobuf/saved_model.proto\x12\ntensorflow\x1a)tensorflow/core/protobuf/meta_graph.proto\"_\n\nSavedModel\x12\"\n\x1asaved_model_schema_version\x18\x01 \x01(\x03\x12-\n\x0bmeta_graphs\x18\x02 \x03(\x0b\x32\x18.tensorflow.MetaGraphDefB1\n\x18org.tensorflow.frameworkB\x10SavedModelProtosP\x01\xf8\x01\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 | _SAVEDMODEL = _descriptor.Descriptor(
32 | name='SavedModel',
33 | full_name='tensorflow.SavedModel',
34 | filename=None,
35 | file=DESCRIPTOR,
36 | containing_type=None,
37 | fields=[
38 | _descriptor.FieldDescriptor(
39 | name='saved_model_schema_version', full_name='tensorflow.SavedModel.saved_model_schema_version', index=0,
40 | number=1, type=3, cpp_type=2, label=1,
41 | has_default_value=False, default_value=0,
42 | message_type=None, enum_type=None, containing_type=None,
43 | is_extension=False, extension_scope=None,
44 | options=None),
45 | _descriptor.FieldDescriptor(
46 | name='meta_graphs', full_name='tensorflow.SavedModel.meta_graphs', index=1,
47 | number=2, type=11, cpp_type=10, label=3,
48 | has_default_value=False, default_value=[],
49 | message_type=None, enum_type=None, containing_type=None,
50 | is_extension=False, extension_scope=None,
51 | options=None),
52 | ],
53 | extensions=[
54 | ],
55 | nested_types=[],
56 | enum_types=[
57 | ],
58 | options=None,
59 | is_extendable=False,
60 | syntax='proto3',
61 | extension_ranges=[],
62 | oneofs=[
63 | ],
64 | serialized_start=101,
65 | serialized_end=196,
66 | )
67 |
68 | _SAVEDMODEL.fields_by_name['meta_graphs'].message_type = tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2._METAGRAPHDEF
69 | DESCRIPTOR.message_types_by_name['SavedModel'] = _SAVEDMODEL
70 |
71 | SavedModel = _reflection.GeneratedProtocolMessageType('SavedModel', (_message.Message,), dict(
72 | DESCRIPTOR = _SAVEDMODEL,
73 | __module__ = 'tensorflow.core.protobuf.saved_model_pb2'
74 | # @@protoc_insertion_point(class_scope:tensorflow.SavedModel)
75 | ))
76 | _sym_db.RegisterMessage(SavedModel)
77 |
78 |
79 | DESCRIPTOR.has_options = True
80 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\020SavedModelProtosP\001\370\001\001'))
81 | # @@protoc_insertion_point(module_scope)
82 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/named_tensor_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/named_tensor.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..framework import tensor_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/protobuf/named_tensor.proto',
21 | package='tensorflow',
22 | syntax='proto3',
23 | serialized_pb=_b('\n+tensorflow/core/protobuf/named_tensor.proto\x12\ntensorflow\x1a&tensorflow/core/framework/tensor.proto\"I\n\x10NamedTensorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\'\n\x06tensor\x18\x02 \x01(\x0b\x32\x17.tensorflow.TensorProtoB2\n\x18org.tensorflow.frameworkB\x11NamedTensorProtosP\x01\xf8\x01\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_framework_dot_tensor__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 | _NAMEDTENSORPROTO = _descriptor.Descriptor(
32 | name='NamedTensorProto',
33 | full_name='tensorflow.NamedTensorProto',
34 | filename=None,
35 | file=DESCRIPTOR,
36 | containing_type=None,
37 | fields=[
38 | _descriptor.FieldDescriptor(
39 | name='name', full_name='tensorflow.NamedTensorProto.name', index=0,
40 | number=1, type=9, cpp_type=9, label=1,
41 | has_default_value=False, default_value=_b("").decode('utf-8'),
42 | message_type=None, enum_type=None, containing_type=None,
43 | is_extension=False, extension_scope=None,
44 | options=None),
45 | _descriptor.FieldDescriptor(
46 | name='tensor', full_name='tensorflow.NamedTensorProto.tensor', index=1,
47 | number=2, type=11, cpp_type=10, label=1,
48 | has_default_value=False, default_value=None,
49 | message_type=None, enum_type=None, containing_type=None,
50 | is_extension=False, extension_scope=None,
51 | options=None),
52 | ],
53 | extensions=[
54 | ],
55 | nested_types=[],
56 | enum_types=[
57 | ],
58 | options=None,
59 | is_extendable=False,
60 | syntax='proto3',
61 | extension_ranges=[],
62 | oneofs=[
63 | ],
64 | serialized_start=99,
65 | serialized_end=172,
66 | )
67 |
68 | _NAMEDTENSORPROTO.fields_by_name['tensor'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__pb2._TENSORPROTO
69 | DESCRIPTOR.message_types_by_name['NamedTensorProto'] = _NAMEDTENSORPROTO
70 |
71 | NamedTensorProto = _reflection.GeneratedProtocolMessageType('NamedTensorProto', (_message.Message,), dict(
72 | DESCRIPTOR = _NAMEDTENSORPROTO,
73 | __module__ = 'tensorflow.core.protobuf.named_tensor_pb2'
74 | # @@protoc_insertion_point(class_scope:tensorflow.NamedTensorProto)
75 | ))
76 | _sym_db.RegisterMessage(NamedTensorProto)
77 |
78 |
79 | DESCRIPTOR.has_options = True
80 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\021NamedTensorProtosP\001\370\001\001'))
81 | # @@protoc_insertion_point(module_scope)
82 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Microsoft Azure Machine Learning Hardware Accelerated Models Powered by Project Brainwave
2 |
3 | ## IMPORTANT!
4 | This service is now generally available, and this repo will be shut down. Please use the [updated notebooks](http://aka.ms/aml-accel-models-notebooks) and read the [updated documentation](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-deploy-fpga-web-service).
5 |
6 | Easily create and train a model using one of several deep neural networks (DNNs) as a featurizer, then deploy it on Azure for ultra-low-latency inferencing. The following models are currently available:
7 |
8 | * ResNet 50
9 | * ResNet 152
10 | * DenseNet-121
11 | * VGG-16
12 |
13 | ## How to get access
14 |
15 | Azure ML Hardware Accelerated Models is currently in preview.
16 |
17 | ### Step 1: Create an Azure ML workspace
18 |
19 | Follow [these instructions](https://docs.microsoft.com/en-us/azure/machine-learning/service/quickstart-create-workspace-with-python) to install the Azure ML SDK on your local machine, create an Azure ML workspace, and set up your notebook environment, which is required for the next step.
20 |
21 | **Note:** Only workspaces in the **East US 2** region are currently supported.
22 |
23 | Once you have set up your environment, install the contrib extras:
24 |
25 | ```sh
26 | pip install --upgrade azureml-sdk[contrib]
27 | ```
28 |
29 | Currently only TensorFlow versions <= 1.10 are supported, so install TensorFlow last:
30 |
31 | ```sh
32 | pip install "tensorflow==1.10"
33 | ```
34 |
35 | See the [documentation](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-deploy-fpga-web-service) page if you have any questions.
36 |
37 | ### Step 2: Deploy your service
38 |
39 | Check out the sample notebooks [here](https://aka.ms/aml-notebook-proj-brainwave).
40 |
41 | **Note:** You can deploy one FPGA service. To deploy more than one service, you must [request quota](https://aka.ms/aml-real-time-ai-request) by submitting the form; you will need information from the workspace created in Step 1 ([learn how to get workspace information](docs/README.md)). You will receive an email when your quota request has been approved.
42 |
43 | ## Support
44 | Read the [docs](docs) or visit the [forum](https://aka.ms/aml-forum).
45 |
46 | # Contributing
47 |
48 | This project welcomes contributions and suggestions. Most contributions require you to agree to a
49 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
50 | the rights to use your contribution. For details, visit https://cla.microsoft.com.
51 |
52 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide
53 | a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions
54 | provided by the bot. You will only need to do this once across all repos using our CLA.
55 |
56 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
57 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
58 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
59 |
60 | ## Build Status
61 |
62 | System | Unit tests
63 | --- | ---
64 | Ubuntu 16.04 | [](https://dev.azure.com/coverste/aml-rt-ai/_build/latest?definitionId=1&branchName=master)
65 |
--------------------------------------------------------------------------------
/sample-clients/csharp/resnet/Program.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 | using System.Reflection;
6 | using System.Text;
7 | using System.Threading.Tasks;
8 | using CSharpClient;
9 | using Grpc.Core;
10 | using Newtonsoft.Json;
11 | using Tensorflow.Serving;
12 |
13 | namespace resnet
14 | {
15 | internal class Program
16 | {
17 | private static readonly string errorMessage = "" + Environment.NewLine + "dotnet resnet.dll [url(without port)] [path/to/local/image] ";
18 |
19 | private static int Main(string[] args)
20 | {
21 | return MainAsync(args).Result;
22 | }
23 |
24 | private static async Task<int> MainAsync(string[] args)
25 | {
26 | if (args.Length < 2)
27 | {
28 | Console.WriteLine($"Use as {errorMessage}");
29 | return 1;
30 | }
31 |
32 | var host = args[0];
33 | var image = args[1];
34 |
35 | var useSSL = false;
36 |
37 | if (args.Length > 2)
38 | {
39 | var useSslString = args[2];
40 | bool.TryParse(useSslString, out useSSL);
41 | Console.WriteLine(useSSL ? "Using SSL" : "Not using SSL");
42 | }
43 |
44 | string auth = null;
45 | if (args.Length > 3 && useSSL)
46 | {
47 | auth = args[3];
48 | Console.WriteLine(!string.IsNullOrEmpty(auth) ? "Using auth" : "Not using auth");
49 | }
50 |
51 | var client = new ScoringClient(host, useSSL ? 443 : 80, useSSL, auth);
52 |
53 | using (var content = File.OpenRead(image))
54 | {
55 | IScoringRequest request = new ImageRequest(content);
56 | var result = await client.ScoreAsync(request);
57 | for (int i = 0; i < result.GetLength(0); i++)
58 | {
59 | Console.WriteLine($"Batch {i}:");
60 | var length = result.GetLength(1);
61 | var results = new Dictionary<int, float>();
62 | for (int j = 0; j < length; j++)
63 | {
64 | results.Add(j, result[i, j]);
65 | }
66 |
67 | foreach (var kvp in results.Where(x => x.Value > 0.001).OrderByDescending(x => x.Value).Take(5))
68 | {
69 | Console.WriteLine(
70 | $" {GetLabel(kvp.Key)} {kvp.Value * 100}%");
71 | }
72 | }
73 | }
74 |
75 | return 0;
76 | }
77 |
78 | private static Dictionary<int, string> _classes;
79 |
80 | private static string GetLabel(int classId)
81 | {
82 | if (_classes == null)
83 | {
84 | var assembly = typeof(Program).GetTypeInfo().Assembly;
85 | var result = assembly.GetManifestResourceStream("resnet.imagenet-classes.json");
86 |
87 | var streamReader = new StreamReader(result);
88 | var classesJson = streamReader.ReadToEnd();
89 |
90 | _classes = JsonConvert.DeserializeObject<Dictionary<int, string>>(classesJson);
91 | }
92 |
93 | return _classes[classId];
94 | }
95 | }
96 | }
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow_serving/apis/model_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow_serving/apis/model.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow_serving/apis/model.proto',
21 | package='tensorflow.serving',
22 | syntax='proto3',
23 | serialized_pb=_b('\n#tensorflow_serving/apis/model.proto\x12\x12tensorflow.serving\x1a\x1egoogle/protobuf/wrappers.proto\"_\n\tModelSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x07version\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x16\n\x0esignature_name\x18\x03 \x01(\tB\x03\xf8\x01\x01\x62\x06proto3')
24 | ,
25 | dependencies=[google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 | _MODELSPEC = _descriptor.Descriptor(
32 | name='ModelSpec',
33 | full_name='tensorflow.serving.ModelSpec',
34 | filename=None,
35 | file=DESCRIPTOR,
36 | containing_type=None,
37 | fields=[
38 | _descriptor.FieldDescriptor(
39 | name='name', full_name='tensorflow.serving.ModelSpec.name', index=0,
40 | number=1, type=9, cpp_type=9, label=1,
41 | has_default_value=False, default_value=_b("").decode('utf-8'),
42 | message_type=None, enum_type=None, containing_type=None,
43 | is_extension=False, extension_scope=None,
44 | options=None),
45 | _descriptor.FieldDescriptor(
46 | name='version', full_name='tensorflow.serving.ModelSpec.version', index=1,
47 | number=2, type=11, cpp_type=10, label=1,
48 | has_default_value=False, default_value=None,
49 | message_type=None, enum_type=None, containing_type=None,
50 | is_extension=False, extension_scope=None,
51 | options=None),
52 | _descriptor.FieldDescriptor(
53 | name='signature_name', full_name='tensorflow.serving.ModelSpec.signature_name', index=2,
54 | number=3, type=9, cpp_type=9, label=1,
55 | has_default_value=False, default_value=_b("").decode('utf-8'),
56 | message_type=None, enum_type=None, containing_type=None,
57 | is_extension=False, extension_scope=None,
58 | options=None),
59 | ],
60 | extensions=[
61 | ],
62 | nested_types=[],
63 | enum_types=[
64 | ],
65 | options=None,
66 | is_extendable=False,
67 | syntax='proto3',
68 | extension_ranges=[],
69 | oneofs=[
70 | ],
71 | serialized_start=91,
72 | serialized_end=186,
73 | )
74 |
75 | _MODELSPEC.fields_by_name['version'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
76 | DESCRIPTOR.message_types_by_name['ModelSpec'] = _MODELSPEC
77 |
78 | ModelSpec = _reflection.GeneratedProtocolMessageType('ModelSpec', (_message.Message,), dict(
79 | DESCRIPTOR = _MODELSPEC,
80 | __module__ = 'tensorflow_serving.apis.model_pb2'
81 | # @@protoc_insertion_point(class_scope:tensorflow.serving.ModelSpec)
82 | ))
83 | _sym_db.RegisterMessage(ModelSpec)
84 |
85 |
86 | DESCRIPTOR.has_options = True
87 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
88 | # @@protoc_insertion_point(module_scope)
89 |
--------------------------------------------------------------------------------
/sample-clients/README.md:
--------------------------------------------------------------------------------
1 | # Sample clients
2 |
3 | Azure Machine Learning Hardware Accelerated Models implements part of the [Tensorflow Serving Predict API](https://github.com/tensorflow/serving/blob/r1.12/tensorflow_serving/apis/prediction_service.proto), which uses gRPC, an RPC framework built on HTTP/2. Specifically, we support only the `Predict` RPC call.
4 |
5 | ## Provided clients
6 | We provide sample clients for C# and Python. These clients are built on top of the code generated by the gRPC codegen tools.
7 |
8 | ### Using provided clients
9 | For optimal performance, you should reuse the client object between calls.
10 | #### C#
11 | See `csharp\resnet` for a sample console application that uses the provided C# client.
12 | #### Python
13 | ``` python
14 | from client import PredictionClient
15 | def run(ip_address, port, input_data):
16 | client = PredictionClient(ip_address, port, False, "")
17 | if isinstance(input_data, str):
18 | return client.score_image(input_data)
19 | if isinstance(input_data, np.ndarray):
20 | return client.score_numpy_array(input_data)
21 | return client.score_file(input_data.read())
22 | ```
23 |
24 | ## Generated Clients
25 | You can also generate clients for your preferred language by using the gRPC codegen tools. For more information, especially on using the generated clients, consult the [gRPC documentation](https://grpc.io/docs/).
26 |
27 |
28 | ### Generating clients
29 | #### Requirements
30 | 1. Git
31 | 2. gRPC and its prerequisites for your desired language. To install these, follow the [quickstart instructions](https://grpc.io/docs/) for that language.
32 | #### Instructions
33 | 1. Clone [Tensorflow](https://github.com/tensorflow/tensorflow) and [Tensorflow-Serving](https://github.com/tensorflow/serving) to a directory.
34 | ```
35 | sample-clients> git clone https://github.com/tensorflow/tensorflow.git
36 | sample-clients> git clone https://github.com/tensorflow/serving.git
37 | ```
38 | 2. Check out the appropriate version of the dependencies (r1.12).
39 |
40 | ```
41 | sample-clients> cd tensorflow
42 | sample-clients/tensorflow> git fetch
43 | sample-clients/tensorflow> git checkout 1.12
44 | sample-clients/tensorflow> cd ../serving
45 | sample-clients/serving> git fetch
46 | sample-clients/serving> git checkout 1.12
47 | sample-clients/serving> cd ..
48 | ```
49 | 3. Run protoc with the gRPC plugin to generate code for the desired language. Consult the gRPC docs for instructions on how to install and locate protoc and the gRPC plugin.
50 |
51 | This example is for Go:
52 | ```
53 | sample-clients>protoc.exe -I tensorflow/ -I serving/ tensorflow/tensorflow/core/framework/tensor.proto tensorflow/tensorflow/core/framework/types.proto tensorflow/tensorflow/core/framework/resource_handle.proto tensorflow/tensorflow/core/framework/tensor_shape.proto --go_out=plugins=grpc:go
54 | sample-clients>protoc.exe -I tensorflow/ -I serving/ serving/tensorflow_serving/apis/predict.proto serving/tensorflow_serving/apis/model.proto serving/tensorflow_serving/apis/prediction_service.proto --go_out=plugins=grpc:go
55 | ```
56 |
57 | This will generate the Go source code in `sample-clients/go`.
58 | ### Using generated clients
59 | Usage of the generated clients varies by language; however, the basic flow is:
60 | 1. Create a connection with the remote server
61 | 2. Create a prediction client with this connection
62 | 3. Construct the tensor(s) for input
63 | 4. Construct the predict request from the tensors
64 | 5. Make an RPC call with the request using the client
65 | 6. Read the response and consume it
66 |
67 | For an example of building on the generated code, see `pythonlib/amlrealtimeai/client.py`; a minimal sketch of the same flow is shown below.
68 |
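69 | Below is a minimal sketch of this flow in Python, built on the same generated `tensorflow_serving` modules (the copies bundled under `pythonlib/amlrealtimeai/external` also work). The helper name `predict_image`, the insecure channel, and the 10-second timeout are illustrative choices; the tensor names `images` and `output_alias` are the ones used by the provided clients in this repo.
70 |
71 | ``` python
72 | import grpc
73 | from tensorflow.core.framework import types_pb2
74 | from tensorflow_serving.apis import predict_pb2, prediction_service_pb2_grpc
75 |
76 | def predict_image(host, image_path):
77 |     # 1. Create a connection with the remote server
78 |     channel = grpc.insecure_channel(host)
79 |     # 2. Create a prediction client with this connection
80 |     stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
81 |     # 3./4. Construct the input tensor and the predict request
82 |     request = predict_pb2.PredictRequest()
83 |     with open(image_path, 'rb') as f:
84 |         request.inputs['images'].string_val.append(f.read())
85 |     request.inputs['images'].dtype = types_pb2.DT_STRING
86 |     request.inputs['images'].tensor_shape.dim.add(size=1)
87 |     # 5. Make the Predict RPC call with the request
88 |     response = stub.Predict(request, 10.0)
89 |     # 6. Read the response (a map of output name to TensorProto) and consume it
90 |     return response.outputs['output_alias']
91 | ```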
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/reader_base_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/reader_base.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/reader_base.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n+tensorflow/core/framework/reader_base.proto\x12\ntensorflow\"r\n\x0fReaderBaseState\x12\x14\n\x0cwork_started\x18\x01 \x01(\x03\x12\x15\n\rwork_finished\x18\x02 \x01(\x03\x12\x1c\n\x14num_records_produced\x18\x03 \x01(\x03\x12\x14\n\x0c\x63urrent_work\x18\x04 \x01(\x0c\x42\x31\n\x18org.tensorflow.frameworkB\x10ReaderBaseProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _READERBASESTATE = _descriptor.Descriptor(
30 | name='ReaderBaseState',
31 | full_name='tensorflow.ReaderBaseState',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='work_started', full_name='tensorflow.ReaderBaseState.work_started', index=0,
38 | number=1, type=3, cpp_type=2, label=1,
39 | has_default_value=False, default_value=0,
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='work_finished', full_name='tensorflow.ReaderBaseState.work_finished', index=1,
45 | number=2, type=3, cpp_type=2, label=1,
46 | has_default_value=False, default_value=0,
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | _descriptor.FieldDescriptor(
51 | name='num_records_produced', full_name='tensorflow.ReaderBaseState.num_records_produced', index=2,
52 | number=3, type=3, cpp_type=2, label=1,
53 | has_default_value=False, default_value=0,
54 | message_type=None, enum_type=None, containing_type=None,
55 | is_extension=False, extension_scope=None,
56 | options=None),
57 | _descriptor.FieldDescriptor(
58 | name='current_work', full_name='tensorflow.ReaderBaseState.current_work', index=3,
59 | number=4, type=12, cpp_type=9, label=1,
60 | has_default_value=False, default_value=_b(""),
61 | message_type=None, enum_type=None, containing_type=None,
62 | is_extension=False, extension_scope=None,
63 | options=None),
64 | ],
65 | extensions=[
66 | ],
67 | nested_types=[],
68 | enum_types=[
69 | ],
70 | options=None,
71 | is_extendable=False,
72 | syntax='proto3',
73 | extension_ranges=[],
74 | oneofs=[
75 | ],
76 | serialized_start=59,
77 | serialized_end=173,
78 | )
79 |
80 | DESCRIPTOR.message_types_by_name['ReaderBaseState'] = _READERBASESTATE
81 |
82 | ReaderBaseState = _reflection.GeneratedProtocolMessageType('ReaderBaseState', (_message.Message,), dict(
83 | DESCRIPTOR = _READERBASESTATE,
84 | __module__ = 'tensorflow.core.framework.reader_base_pb2'
85 | # @@protoc_insertion_point(class_scope:tensorflow.ReaderBaseState)
86 | ))
87 | _sym_db.RegisterMessage(ReaderBaseState)
88 |
89 |
90 | DESCRIPTOR.has_options = True
91 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\020ReaderBaseProtosP\001\370\001\001'))
92 | # @@protoc_insertion_point(module_scope)
93 |
--------------------------------------------------------------------------------
/sample-clients/csharp/client/ScoringClient.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | using System;
5 | using System.Threading.Tasks;
6 | using Grpc.Core;
7 | using Tensorflow.Serving;
8 |
9 | namespace CSharpClient
10 | {
11 | using System.Collections.Generic;
12 | using System.Linq;
13 |
14 | public class ScoringClient
15 | {
16 | private const int RetryCount = 10;
17 |
18 | private readonly IPredictionServiceClient _client;
19 |
20 | public ScoringClient(IPredictionServiceClient client)
21 | {
22 | _client = client;
23 | }
24 |
25 | public ScoringClient(Channel channel) : this(new PredictionServiceClientWrapper(new PredictionService.PredictionServiceClient(channel)))
26 | {
27 | }
28 |
29 | public ScoringClient(string host, int port, bool useSsl = false, string authKey = null)
30 | {
31 | ChannelCredentials baseCreds, creds;
32 | baseCreds = useSsl ? new SslCredentials() : ChannelCredentials.Insecure;
33 | if (authKey != null && useSsl)
34 | {
35 | creds = ChannelCredentials.Create(baseCreds, CallCredentials.FromInterceptor(
36 | async (context, metadata) =>
37 | {
38 | metadata.Add(new Metadata.Entry("authorization", authKey));
39 | await Task.CompletedTask;
40 | }));
41 | }
42 | else
43 | {
44 | creds = baseCreds;
45 | }
46 | var channel = new Channel(host, port, creds);
47 | _client = new PredictionServiceClientWrapper(new PredictionService.PredictionServiceClient(channel));
48 | }
49 |
50 | public async Task<float[,]> ScoreAsync(IScoringRequest request, int retryCount = RetryCount, string output_name = "output_alias")
51 | {
52 | return await ScoreAsync<float[,]>(request, retryCount, output_name);
53 | }
54 |
55 | public async Task<T> ScoreAsync<T>(IScoringRequest request, int retryCount = RetryCount, string output_name = "output_alias") where T : class
56 | {
57 | return (await this.PredictAsync<T>(request, retryCount))[output_name];
58 | }
59 |
60 | public async Task<IDictionary<string, T>> PredictAsync<T>(IScoringRequest request, int retryCount = RetryCount) where T : class
61 | {
62 | var predictRequest = request.MakePredictRequest();
63 |
64 | return await RetryAsync(async () =>
65 | {
66 | var result = await _client.PredictAsync(predictRequest);
67 | return result.Outputs.ToDictionary(
68 | kvp => kvp.Key, kvp => kvp.Value.Convert<T>());
69 | }, retryCount);
70 | }
71 |
72 | private async Task<T> RetryAsync<T>(
73 | Func<Task<T>> operation, int retryCount = RetryCount
74 | )
75 | {
76 | while (true)
77 | {
78 | try
79 | {
80 | return await operation();
81 | }
82 | catch (RpcException rpcException)
83 | {
84 | if (!IsTransient(rpcException) || --retryCount <= 0)
85 | {
86 | throw;
87 | }
88 | }
89 | }
90 | }
91 |
92 | private static bool IsTransient(RpcException rpcException)
93 | {
94 | return
95 | rpcException.Status.StatusCode == StatusCode.DeadlineExceeded ||
96 | rpcException.Status.StatusCode == StatusCode.Unavailable ||
97 | rpcException.Status.StatusCode == StatusCode.Aborted ||
98 | rpcException.Status.StatusCode == StatusCode.Internal;
99 | }
100 | }
101 | }
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/resource_handle_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/resource_handle.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/resource_handle.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n/tensorflow/core/framework/resource_handle.proto\x12\ntensorflow\"r\n\x13ResourceHandleProto\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t\x12\x11\n\tcontainer\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x11\n\thash_code\x18\x04 \x01(\x04\x12\x17\n\x0fmaybe_type_name\x18\x05 \x01(\tB/\n\x18org.tensorflow.frameworkB\x0eResourceHandleP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _RESOURCEHANDLEPROTO = _descriptor.Descriptor(
30 | name='ResourceHandleProto',
31 | full_name='tensorflow.ResourceHandleProto',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='device', full_name='tensorflow.ResourceHandleProto.device', index=0,
38 | number=1, type=9, cpp_type=9, label=1,
39 | has_default_value=False, default_value=_b("").decode('utf-8'),
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='container', full_name='tensorflow.ResourceHandleProto.container', index=1,
45 | number=2, type=9, cpp_type=9, label=1,
46 | has_default_value=False, default_value=_b("").decode('utf-8'),
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | _descriptor.FieldDescriptor(
51 | name='name', full_name='tensorflow.ResourceHandleProto.name', index=2,
52 | number=3, type=9, cpp_type=9, label=1,
53 | has_default_value=False, default_value=_b("").decode('utf-8'),
54 | message_type=None, enum_type=None, containing_type=None,
55 | is_extension=False, extension_scope=None,
56 | options=None),
57 | _descriptor.FieldDescriptor(
58 | name='hash_code', full_name='tensorflow.ResourceHandleProto.hash_code', index=3,
59 | number=4, type=4, cpp_type=4, label=1,
60 | has_default_value=False, default_value=0,
61 | message_type=None, enum_type=None, containing_type=None,
62 | is_extension=False, extension_scope=None,
63 | options=None),
64 | _descriptor.FieldDescriptor(
65 | name='maybe_type_name', full_name='tensorflow.ResourceHandleProto.maybe_type_name', index=4,
66 | number=5, type=9, cpp_type=9, label=1,
67 | has_default_value=False, default_value=_b("").decode('utf-8'),
68 | message_type=None, enum_type=None, containing_type=None,
69 | is_extension=False, extension_scope=None,
70 | options=None),
71 | ],
72 | extensions=[
73 | ],
74 | nested_types=[],
75 | enum_types=[
76 | ],
77 | options=None,
78 | is_extendable=False,
79 | syntax='proto3',
80 | extension_ranges=[],
81 | oneofs=[
82 | ],
83 | serialized_start=63,
84 | serialized_end=177,
85 | )
86 |
87 | DESCRIPTOR.message_types_by_name['ResourceHandleProto'] = _RESOURCEHANDLEPROTO
88 |
89 | ResourceHandleProto = _reflection.GeneratedProtocolMessageType('ResourceHandleProto', (_message.Message,), dict(
90 | DESCRIPTOR = _RESOURCEHANDLEPROTO,
91 | __module__ = 'tensorflow.core.framework.resource_handle_pb2'
92 | # @@protoc_insertion_point(class_scope:tensorflow.ResourceHandleProto)
93 | ))
94 | _sym_db.RegisterMessage(ResourceHandleProto)
95 |
96 |
97 | DESCRIPTOR.has_options = True
98 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\016ResourceHandleP\001\370\001\001'))
99 | # @@protoc_insertion_point(module_scope)
100 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/tensor_description_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/tensor_description.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2
17 | from ..framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2
18 | from ..framework import allocation_description_pb2 as tensorflow_dot_core_dot_framework_dot_allocation__description__pb2
19 |
20 |
21 | DESCRIPTOR = _descriptor.FileDescriptor(
22 | name='tensorflow/core/framework/tensor_description.proto',
23 | package='tensorflow',
24 | syntax='proto3',
25 | serialized_pb=_b('\n2tensorflow/core/framework/tensor_description.proto\x12\ntensorflow\x1a%tensorflow/core/framework/types.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a\x36tensorflow/core/framework/allocation_description.proto\"\xa8\x01\n\x11TensorDescription\x12#\n\x05\x64type\x18\x01 \x01(\x0e\x32\x14.tensorflow.DataType\x12+\n\x05shape\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12\x41\n\x16\x61llocation_description\x18\x04 \x01(\x0b\x32!.tensorflow.AllocationDescriptionB8\n\x18org.tensorflow.frameworkB\x17TensorDescriptionProtosP\x01\xf8\x01\x01\x62\x06proto3')
26 | ,
27 | dependencies=[tensorflow_dot_core_dot_framework_dot_types__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_allocation__description__pb2.DESCRIPTOR,])
28 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
29 |
30 |
31 |
32 |
33 | _TENSORDESCRIPTION = _descriptor.Descriptor(
34 | name='TensorDescription',
35 | full_name='tensorflow.TensorDescription',
36 | filename=None,
37 | file=DESCRIPTOR,
38 | containing_type=None,
39 | fields=[
40 | _descriptor.FieldDescriptor(
41 | name='dtype', full_name='tensorflow.TensorDescription.dtype', index=0,
42 | number=1, type=14, cpp_type=8, label=1,
43 | has_default_value=False, default_value=0,
44 | message_type=None, enum_type=None, containing_type=None,
45 | is_extension=False, extension_scope=None,
46 | options=None),
47 | _descriptor.FieldDescriptor(
48 | name='shape', full_name='tensorflow.TensorDescription.shape', index=1,
49 | number=2, type=11, cpp_type=10, label=1,
50 | has_default_value=False, default_value=None,
51 | message_type=None, enum_type=None, containing_type=None,
52 | is_extension=False, extension_scope=None,
53 | options=None),
54 | _descriptor.FieldDescriptor(
55 | name='allocation_description', full_name='tensorflow.TensorDescription.allocation_description', index=2,
56 | number=4, type=11, cpp_type=10, label=1,
57 | has_default_value=False, default_value=None,
58 | message_type=None, enum_type=None, containing_type=None,
59 | is_extension=False, extension_scope=None,
60 | options=None),
61 | ],
62 | extensions=[
63 | ],
64 | nested_types=[],
65 | enum_types=[
66 | ],
67 | options=None,
68 | is_extendable=False,
69 | syntax='proto3',
70 | extension_ranges=[],
71 | oneofs=[
72 | ],
73 | serialized_start=208,
74 | serialized_end=376,
75 | )
76 |
77 | _TENSORDESCRIPTION.fields_by_name['dtype'].enum_type = tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE
78 | _TENSORDESCRIPTION.fields_by_name['shape'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO
79 | _TENSORDESCRIPTION.fields_by_name['allocation_description'].message_type = tensorflow_dot_core_dot_framework_dot_allocation__description__pb2._ALLOCATIONDESCRIPTION
80 | DESCRIPTOR.message_types_by_name['TensorDescription'] = _TENSORDESCRIPTION
81 |
82 | TensorDescription = _reflection.GeneratedProtocolMessageType('TensorDescription', (_message.Message,), dict(
83 | DESCRIPTOR = _TENSORDESCRIPTION,
84 | __module__ = 'tensorflow.core.framework.tensor_description_pb2'
85 | # @@protoc_insertion_point(class_scope:tensorflow.TensorDescription)
86 | ))
87 | _sym_db.RegisterMessage(TensorDescription)
88 |
89 |
90 | DESCRIPTOR.has_options = True
91 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\027TensorDescriptionProtosP\001\370\001\001'))
92 | # @@protoc_insertion_point(module_scope)
93 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/client.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | import numpy as np
4 | import tensorflow as tf
5 | import tensorflow.contrib
6 | import grpc
7 | import time
8 | from datetime import datetime, timedelta
9 |
10 | try:
11 | from tensorflow_serving.apis import predict_pb2
12 | from tensorflow_serving.apis import prediction_service_pb2_grpc
13 | except ImportError:
14 | from .external.tensorflow_serving.apis import predict_pb2
15 | from .external.tensorflow_serving.apis import prediction_service_pb2_grpc
16 |
17 | try:
18 | from tensorflow.core.framework import tensor_shape_pb2
19 | from tensorflow.core.framework import types_pb2
20 | except ImportError:
21 | from .external.tensorflow.core.framework import tensor_shape_pb2
22 | from .external.tensorflow.core.framework import types_pb2
23 |
24 | class PredictionClient:
25 |
26 | def __init__(self, address: str, port: int, use_ssl:bool = False, access_token:str = "", channel_shutdown_timeout:timedelta = timedelta(minutes=2)):
27 | if(address is None):
28 | raise ValueError("address")
29 |
30 | if(port is None):
31 | raise ValueError("port")
32 |
33 | host = "{0}:{1}".format(address, port)
34 | metadata_transformer = (lambda x: [('authorization', access_token)])
35 | ssl_credentials = grpc.composite_channel_credentials(grpc.ssl_channel_credentials(),
36 | grpc.metadata_call_credentials(metadata_transformer))
37 |
38 | if use_ssl:
39 | self._channel_func = lambda: grpc.secure_channel(host, ssl_credentials)
40 | else:
41 | self._channel_func = lambda: grpc.insecure_channel(host)
42 |
43 | self.__channel_shutdown_timeout = channel_shutdown_timeout
44 | self.__channel_usable_until = None
45 | self.__channel = None
46 |
47 |
48 | def score_numpy_array(self, npdata):
49 | request = predict_pb2.PredictRequest()
50 | request.inputs['images'].CopyFrom(tf.contrib.util.make_tensor_proto(npdata, types_pb2.DT_FLOAT, npdata.shape))
51 | result_tensor = self.__predict(request, 30.0)
52 | return tf.contrib.util.make_ndarray(result_tensor)
53 |
54 | def score_image(self, path: str, timeout: float = 10.0):
55 | with open(path, 'rb') as f:
56 | data = f.read()
57 | result = self.score_tensor(data, [1], types_pb2.DT_STRING, timeout) #7 is dt_string
58 | result_ndarray = tf.contrib.util.make_ndarray(result)
59 | # result is a batch, but the API only allows a single image so we return the
60 | # single item of the batch here
61 | return result_ndarray[0]
62 |
63 |
64 | @staticmethod
65 | def make_dim_list(shape:list):
66 | ret_list = []
67 | for val in shape:
68 | dim = tensor_shape_pb2.TensorShapeProto.Dim()
69 | dim.size=val
70 | ret_list.append(dim)
71 | return ret_list
72 |
73 | def score_tensor(self, data: bytes, shape: list, datatype, timeout: float = 10.0):
74 | request = predict_pb2.PredictRequest()
75 | request.inputs['images'].string_val.append(data)
76 | request.inputs['images'].dtype = datatype
77 | request.inputs['images'].tensor_shape.dim.extend(self.make_dim_list(shape))
78 | return self.__predict(request, timeout)
79 |
80 | def _get_datetime_now(self):
81 | return datetime.now()
82 |
83 | def _get_grpc_stub(self):
84 | if(self.__channel_usable_until is None or self.__channel_usable_until < self._get_datetime_now()):
85 | self.__reinitialize_channel()
86 | self.__channel_usable_until = self._get_datetime_now() + self.__channel_shutdown_timeout
87 | return self.__stub
88 |
89 | def __predict(self, request, timeout):
90 | retry_count = 5
91 | sleep_delay = 1
92 |
93 | while(True):
94 | try:
95 | result = self._get_grpc_stub().Predict(request, timeout)
96 | return result.outputs["output_alias"]
97 | except grpc.RpcError as rpcError:
98 | retry_count = retry_count - 1
99 | if(retry_count <= 0):
100 | raise
101 | time.sleep(sleep_delay)
102 | sleep_delay = sleep_delay * 2
103 | print("Retrying", rpcError)
104 | self.__reinitialize_channel()
105 |
106 | def __reinitialize_channel(self):
107 | self.__stub = None
108 | if self.__channel is not None:
109 | self.__channel.close()
110 | self.__channel = self._channel_func()
111 | self.__stub = prediction_service_pb2_grpc.PredictionServiceStub(self.__channel)
112 |
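113 | # Example usage (a minimal sketch; the address below is a placeholder, not a real endpoint):
114 | #
115 | #     client = PredictionClient("my-service.eastus2.cloudapp.azure.com", 80)
116 | #     scores = client.score_image("snowleopardgaze.jpg")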
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/critical_section_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/critical_section.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/protobuf/critical_section.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n/tensorflow/core/protobuf/critical_section.proto\x12\ntensorflow\"3\n\x12\x43riticalSectionDef\x12\x1d\n\x15\x63ritical_section_name\x18\x01 \x01(\t\"j\n\x1b\x43riticalSectionExecutionDef\x12(\n execute_in_critical_section_name\x18\x01 \x01(\t\x12!\n\x19\x65xclusive_resource_access\x18\x02 \x01(\x08\x42\x36\n\x18org.tensorflow.frameworkB\x15\x43riticalSectionProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _CRITICALSECTIONDEF = _descriptor.Descriptor(
30 | name='CriticalSectionDef',
31 | full_name='tensorflow.CriticalSectionDef',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='critical_section_name', full_name='tensorflow.CriticalSectionDef.critical_section_name', index=0,
38 | number=1, type=9, cpp_type=9, label=1,
39 | has_default_value=False, default_value=_b("").decode('utf-8'),
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | ],
44 | extensions=[
45 | ],
46 | nested_types=[],
47 | enum_types=[
48 | ],
49 | options=None,
50 | is_extendable=False,
51 | syntax='proto3',
52 | extension_ranges=[],
53 | oneofs=[
54 | ],
55 | serialized_start=63,
56 | serialized_end=114,
57 | )
58 |
59 |
60 | _CRITICALSECTIONEXECUTIONDEF = _descriptor.Descriptor(
61 | name='CriticalSectionExecutionDef',
62 | full_name='tensorflow.CriticalSectionExecutionDef',
63 | filename=None,
64 | file=DESCRIPTOR,
65 | containing_type=None,
66 | fields=[
67 | _descriptor.FieldDescriptor(
68 | name='execute_in_critical_section_name', full_name='tensorflow.CriticalSectionExecutionDef.execute_in_critical_section_name', index=0,
69 | number=1, type=9, cpp_type=9, label=1,
70 | has_default_value=False, default_value=_b("").decode('utf-8'),
71 | message_type=None, enum_type=None, containing_type=None,
72 | is_extension=False, extension_scope=None,
73 | options=None),
74 | _descriptor.FieldDescriptor(
75 | name='exclusive_resource_access', full_name='tensorflow.CriticalSectionExecutionDef.exclusive_resource_access', index=1,
76 | number=2, type=8, cpp_type=7, label=1,
77 | has_default_value=False, default_value=False,
78 | message_type=None, enum_type=None, containing_type=None,
79 | is_extension=False, extension_scope=None,
80 | options=None),
81 | ],
82 | extensions=[
83 | ],
84 | nested_types=[],
85 | enum_types=[
86 | ],
87 | options=None,
88 | is_extendable=False,
89 | syntax='proto3',
90 | extension_ranges=[],
91 | oneofs=[
92 | ],
93 | serialized_start=116,
94 | serialized_end=222,
95 | )
96 |
97 | DESCRIPTOR.message_types_by_name['CriticalSectionDef'] = _CRITICALSECTIONDEF
98 | DESCRIPTOR.message_types_by_name['CriticalSectionExecutionDef'] = _CRITICALSECTIONEXECUTIONDEF
99 |
100 | CriticalSectionDef = _reflection.GeneratedProtocolMessageType('CriticalSectionDef', (_message.Message,), dict(
101 | DESCRIPTOR = _CRITICALSECTIONDEF,
102 | __module__ = 'tensorflow.core.protobuf.critical_section_pb2'
103 | # @@protoc_insertion_point(class_scope:tensorflow.CriticalSectionDef)
104 | ))
105 | _sym_db.RegisterMessage(CriticalSectionDef)
106 |
107 | CriticalSectionExecutionDef = _reflection.GeneratedProtocolMessageType('CriticalSectionExecutionDef', (_message.Message,), dict(
108 | DESCRIPTOR = _CRITICALSECTIONEXECUTIONDEF,
109 | __module__ = 'tensorflow.core.protobuf.critical_section_pb2'
110 | # @@protoc_insertion_point(class_scope:tensorflow.CriticalSectionExecutionDef)
111 | ))
112 | _sym_db.RegisterMessage(CriticalSectionExecutionDef)
113 |
114 |
115 | DESCRIPTOR.has_options = True
116 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\025CriticalSectionProtosP\001\370\001\001'))
117 | # @@protoc_insertion_point(module_scope)
118 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/queue_runner_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/queue_runner.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..lib.core import error_codes_pb2 as tensorflow_dot_core_dot_lib_dot_core_dot_error__codes__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/protobuf/queue_runner.proto',
21 | package='tensorflow',
22 | syntax='proto3',
23 | serialized_pb=_b('\n+tensorflow/core/protobuf/queue_runner.proto\x12\ntensorflow\x1a*tensorflow/core/lib/core/error_codes.proto\"\xaa\x01\n\x0eQueueRunnerDef\x12\x12\n\nqueue_name\x18\x01 \x01(\t\x12\x17\n\x0f\x65nqueue_op_name\x18\x02 \x03(\t\x12\x15\n\rclose_op_name\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61ncel_op_name\x18\x04 \x01(\t\x12<\n\x1cqueue_closed_exception_types\x18\x05 \x03(\x0e\x32\x16.tensorflow.error.CodeB2\n\x18org.tensorflow.frameworkB\x11QueueRunnerProtosP\x01\xf8\x01\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_lib_dot_core_dot_error__codes__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 | _QUEUERUNNERDEF = _descriptor.Descriptor(
32 | name='QueueRunnerDef',
33 | full_name='tensorflow.QueueRunnerDef',
34 | filename=None,
35 | file=DESCRIPTOR,
36 | containing_type=None,
37 | fields=[
38 | _descriptor.FieldDescriptor(
39 | name='queue_name', full_name='tensorflow.QueueRunnerDef.queue_name', index=0,
40 | number=1, type=9, cpp_type=9, label=1,
41 | has_default_value=False, default_value=_b("").decode('utf-8'),
42 | message_type=None, enum_type=None, containing_type=None,
43 | is_extension=False, extension_scope=None,
44 | options=None),
45 | _descriptor.FieldDescriptor(
46 | name='enqueue_op_name', full_name='tensorflow.QueueRunnerDef.enqueue_op_name', index=1,
47 | number=2, type=9, cpp_type=9, label=3,
48 | has_default_value=False, default_value=[],
49 | message_type=None, enum_type=None, containing_type=None,
50 | is_extension=False, extension_scope=None,
51 | options=None),
52 | _descriptor.FieldDescriptor(
53 | name='close_op_name', full_name='tensorflow.QueueRunnerDef.close_op_name', index=2,
54 | number=3, type=9, cpp_type=9, label=1,
55 | has_default_value=False, default_value=_b("").decode('utf-8'),
56 | message_type=None, enum_type=None, containing_type=None,
57 | is_extension=False, extension_scope=None,
58 | options=None),
59 | _descriptor.FieldDescriptor(
60 | name='cancel_op_name', full_name='tensorflow.QueueRunnerDef.cancel_op_name', index=3,
61 | number=4, type=9, cpp_type=9, label=1,
62 | has_default_value=False, default_value=_b("").decode('utf-8'),
63 | message_type=None, enum_type=None, containing_type=None,
64 | is_extension=False, extension_scope=None,
65 | options=None),
66 | _descriptor.FieldDescriptor(
67 | name='queue_closed_exception_types', full_name='tensorflow.QueueRunnerDef.queue_closed_exception_types', index=4,
68 | number=5, type=14, cpp_type=8, label=3,
69 | has_default_value=False, default_value=[],
70 | message_type=None, enum_type=None, containing_type=None,
71 | is_extension=False, extension_scope=None,
72 | options=None),
73 | ],
74 | extensions=[
75 | ],
76 | nested_types=[],
77 | enum_types=[
78 | ],
79 | options=None,
80 | is_extendable=False,
81 | syntax='proto3',
82 | extension_ranges=[],
83 | oneofs=[
84 | ],
85 | serialized_start=104,
86 | serialized_end=274,
87 | )
88 |
89 | _QUEUERUNNERDEF.fields_by_name['queue_closed_exception_types'].enum_type = tensorflow_dot_core_dot_lib_dot_core_dot_error__codes__pb2._CODE
90 | DESCRIPTOR.message_types_by_name['QueueRunnerDef'] = _QUEUERUNNERDEF
91 |
92 | QueueRunnerDef = _reflection.GeneratedProtocolMessageType('QueueRunnerDef', (_message.Message,), dict(
93 | DESCRIPTOR = _QUEUERUNNERDEF,
94 | __module__ = 'tensorflow.core.protobuf.queue_runner_pb2'
95 | # @@protoc_insertion_point(class_scope:tensorflow.QueueRunnerDef)
96 | ))
97 | _sym_db.RegisterMessage(QueueRunnerDef)
98 |
99 |
100 | DESCRIPTOR.has_options = True
101 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\021QueueRunnerProtosP\001\370\001\001'))
102 | # @@protoc_insertion_point(module_scope)
103 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/example/example_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/example/example.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..example import feature_pb2 as tensorflow_dot_core_dot_example_dot_feature__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/example/example.proto',
21 | package='tensorflow',
22 | syntax='proto3',
23 | serialized_pb=_b('\n%tensorflow/core/example/example.proto\x12\ntensorflow\x1a%tensorflow/core/example/feature.proto\"1\n\x07\x45xample\x12&\n\x08\x66\x65\x61tures\x18\x01 \x01(\x0b\x32\x14.tensorflow.Features\"i\n\x0fSequenceExample\x12%\n\x07\x63ontext\x18\x01 \x01(\x0b\x32\x14.tensorflow.Features\x12/\n\rfeature_lists\x18\x02 \x01(\x0b\x32\x18.tensorflow.FeatureListsB,\n\x16org.tensorflow.exampleB\rExampleProtosP\x01\xf8\x01\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_example_dot_feature__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 | _EXAMPLE = _descriptor.Descriptor(
32 | name='Example',
33 | full_name='tensorflow.Example',
34 | filename=None,
35 | file=DESCRIPTOR,
36 | containing_type=None,
37 | fields=[
38 | _descriptor.FieldDescriptor(
39 | name='features', full_name='tensorflow.Example.features', index=0,
40 | number=1, type=11, cpp_type=10, label=1,
41 | has_default_value=False, default_value=None,
42 | message_type=None, enum_type=None, containing_type=None,
43 | is_extension=False, extension_scope=None,
44 | options=None),
45 | ],
46 | extensions=[
47 | ],
48 | nested_types=[],
49 | enum_types=[
50 | ],
51 | options=None,
52 | is_extendable=False,
53 | syntax='proto3',
54 | extension_ranges=[],
55 | oneofs=[
56 | ],
57 | serialized_start=92,
58 | serialized_end=141,
59 | )
60 |
61 |
62 | _SEQUENCEEXAMPLE = _descriptor.Descriptor(
63 | name='SequenceExample',
64 | full_name='tensorflow.SequenceExample',
65 | filename=None,
66 | file=DESCRIPTOR,
67 | containing_type=None,
68 | fields=[
69 | _descriptor.FieldDescriptor(
70 | name='context', full_name='tensorflow.SequenceExample.context', index=0,
71 | number=1, type=11, cpp_type=10, label=1,
72 | has_default_value=False, default_value=None,
73 | message_type=None, enum_type=None, containing_type=None,
74 | is_extension=False, extension_scope=None,
75 | options=None),
76 | _descriptor.FieldDescriptor(
77 | name='feature_lists', full_name='tensorflow.SequenceExample.feature_lists', index=1,
78 | number=2, type=11, cpp_type=10, label=1,
79 | has_default_value=False, default_value=None,
80 | message_type=None, enum_type=None, containing_type=None,
81 | is_extension=False, extension_scope=None,
82 | options=None),
83 | ],
84 | extensions=[
85 | ],
86 | nested_types=[],
87 | enum_types=[
88 | ],
89 | options=None,
90 | is_extendable=False,
91 | syntax='proto3',
92 | extension_ranges=[],
93 | oneofs=[
94 | ],
95 | serialized_start=143,
96 | serialized_end=248,
97 | )
98 |
99 | _EXAMPLE.fields_by_name['features'].message_type = tensorflow_dot_core_dot_example_dot_feature__pb2._FEATURES
100 | _SEQUENCEEXAMPLE.fields_by_name['context'].message_type = tensorflow_dot_core_dot_example_dot_feature__pb2._FEATURES
101 | _SEQUENCEEXAMPLE.fields_by_name['feature_lists'].message_type = tensorflow_dot_core_dot_example_dot_feature__pb2._FEATURELISTS
102 | DESCRIPTOR.message_types_by_name['Example'] = _EXAMPLE
103 | DESCRIPTOR.message_types_by_name['SequenceExample'] = _SEQUENCEEXAMPLE
104 |
105 | Example = _reflection.GeneratedProtocolMessageType('Example', (_message.Message,), dict(
106 | DESCRIPTOR = _EXAMPLE,
107 | __module__ = 'tensorflow.core.example.example_pb2'
108 | # @@protoc_insertion_point(class_scope:tensorflow.Example)
109 | ))
110 | _sym_db.RegisterMessage(Example)
111 |
112 | SequenceExample = _reflection.GeneratedProtocolMessageType('SequenceExample', (_message.Message,), dict(
113 | DESCRIPTOR = _SEQUENCEEXAMPLE,
114 | __module__ = 'tensorflow.core.example.example_pb2'
115 | # @@protoc_insertion_point(class_scope:tensorflow.SequenceExample)
116 | ))
117 | _sym_db.RegisterMessage(SequenceExample)
118 |
119 |
120 | DESCRIPTOR.has_options = True
121 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026org.tensorflow.exampleB\rExampleProtosP\001\370\001\001'))
122 | # @@protoc_insertion_point(module_scope)
123 |
--------------------------------------------------------------------------------
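
The example_pb2 module generated above exposes tensorflow.Example and tensorflow.SequenceExample as ordinary protobuf message classes. The snippet below is a minimal usage sketch, not part of the generated file; it assumes the companion feature_pb2 module that example_pb2.py imports is available under this repository's amlrealtimeai.external package layout.

# Hypothetical usage sketch; import paths assume this repo's layout.
from amlrealtimeai.external.tensorflow.core.example import example_pb2, feature_pb2

# Build a tensorflow.Example carrying a single bytes feature.
example = example_pb2.Example(
    features=feature_pb2.Features(
        feature={
            "image/encoded": feature_pb2.Feature(
                bytes_list=feature_pb2.BytesList(value=[b"<jpeg bytes>"])
            )
        }
    )
)

wire_bytes = example.SerializeToString()               # protobuf wire format
restored = example_pb2.Example.FromString(wire_bytes)  # round-trip parse
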
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/tensor_shape_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/tensor_shape.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/tensor_shape.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n,tensorflow/core/framework/tensor_shape.proto\x12\ntensorflow\"z\n\x10TensorShapeProto\x12-\n\x03\x64im\x18\x02 \x03(\x0b\x32 .tensorflow.TensorShapeProto.Dim\x12\x14\n\x0cunknown_rank\x18\x03 \x01(\x08\x1a!\n\x03\x44im\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\tB2\n\x18org.tensorflow.frameworkB\x11TensorShapeProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _TENSORSHAPEPROTO_DIM = _descriptor.Descriptor(
30 | name='Dim',
31 | full_name='tensorflow.TensorShapeProto.Dim',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='size', full_name='tensorflow.TensorShapeProto.Dim.size', index=0,
38 | number=1, type=3, cpp_type=2, label=1,
39 | has_default_value=False, default_value=0,
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='name', full_name='tensorflow.TensorShapeProto.Dim.name', index=1,
45 | number=2, type=9, cpp_type=9, label=1,
46 | has_default_value=False, default_value=_b("").decode('utf-8'),
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | ],
51 | extensions=[
52 | ],
53 | nested_types=[],
54 | enum_types=[
55 | ],
56 | options=None,
57 | is_extendable=False,
58 | syntax='proto3',
59 | extension_ranges=[],
60 | oneofs=[
61 | ],
62 | serialized_start=149,
63 | serialized_end=182,
64 | )
65 |
66 | _TENSORSHAPEPROTO = _descriptor.Descriptor(
67 | name='TensorShapeProto',
68 | full_name='tensorflow.TensorShapeProto',
69 | filename=None,
70 | file=DESCRIPTOR,
71 | containing_type=None,
72 | fields=[
73 | _descriptor.FieldDescriptor(
74 | name='dim', full_name='tensorflow.TensorShapeProto.dim', index=0,
75 | number=2, type=11, cpp_type=10, label=3,
76 | has_default_value=False, default_value=[],
77 | message_type=None, enum_type=None, containing_type=None,
78 | is_extension=False, extension_scope=None,
79 | options=None),
80 | _descriptor.FieldDescriptor(
81 | name='unknown_rank', full_name='tensorflow.TensorShapeProto.unknown_rank', index=1,
82 | number=3, type=8, cpp_type=7, label=1,
83 | has_default_value=False, default_value=False,
84 | message_type=None, enum_type=None, containing_type=None,
85 | is_extension=False, extension_scope=None,
86 | options=None),
87 | ],
88 | extensions=[
89 | ],
90 | nested_types=[_TENSORSHAPEPROTO_DIM, ],
91 | enum_types=[
92 | ],
93 | options=None,
94 | is_extendable=False,
95 | syntax='proto3',
96 | extension_ranges=[],
97 | oneofs=[
98 | ],
99 | serialized_start=60,
100 | serialized_end=182,
101 | )
102 |
103 | _TENSORSHAPEPROTO_DIM.containing_type = _TENSORSHAPEPROTO
104 | _TENSORSHAPEPROTO.fields_by_name['dim'].message_type = _TENSORSHAPEPROTO_DIM
105 | DESCRIPTOR.message_types_by_name['TensorShapeProto'] = _TENSORSHAPEPROTO
106 |
107 | TensorShapeProto = _reflection.GeneratedProtocolMessageType('TensorShapeProto', (_message.Message,), dict(
108 |
109 | Dim = _reflection.GeneratedProtocolMessageType('Dim', (_message.Message,), dict(
110 | DESCRIPTOR = _TENSORSHAPEPROTO_DIM,
111 | __module__ = 'tensorflow.core.framework.tensor_shape_pb2'
112 | # @@protoc_insertion_point(class_scope:tensorflow.TensorShapeProto.Dim)
113 | ))
114 | ,
115 | DESCRIPTOR = _TENSORSHAPEPROTO,
116 | __module__ = 'tensorflow.core.framework.tensor_shape_pb2'
117 | # @@protoc_insertion_point(class_scope:tensorflow.TensorShapeProto)
118 | ))
119 | _sym_db.RegisterMessage(TensorShapeProto)
120 | _sym_db.RegisterMessage(TensorShapeProto.Dim)
121 |
122 |
123 | DESCRIPTOR.has_options = True
124 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\021TensorShapeProtosP\001\370\001\001'))
125 | # @@protoc_insertion_point(module_scope)
126 |
--------------------------------------------------------------------------------
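
The tensor_shape_pb2 module above can be used directly to describe tensor shapes, for example when hand-building requests for TensorFlow Serving. A minimal sketch, not part of the generated file and assuming this repository's package layout:

from amlrealtimeai.external.tensorflow.core.framework import tensor_shape_pb2

# Shape of a batch of one 224x224 RGB image: [1, 224, 224, 3].
shape = tensor_shape_pb2.TensorShapeProto(
    dim=[
        tensor_shape_pb2.TensorShapeProto.Dim(size=1, name="batch"),
        tensor_shape_pb2.TensorShapeProto.Dim(size=224, name="height"),
        tensor_shape_pb2.TensorShapeProto.Dim(size=224, name="width"),
        tensor_shape_pb2.TensorShapeProto.Dim(size=3, name="channels"),
    ]
)
assert not shape.unknown_rank and len(shape.dim) == 4
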
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/allocation_description_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/allocation_description.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/allocation_description.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n6tensorflow/core/framework/allocation_description.proto\x12\ntensorflow\"\xa3\x01\n\x15\x41llocationDescription\x12\x17\n\x0frequested_bytes\x18\x01 \x01(\x03\x12\x17\n\x0f\x61llocated_bytes\x18\x02 \x01(\x03\x12\x16\n\x0e\x61llocator_name\x18\x03 \x01(\t\x12\x15\n\rallocation_id\x18\x04 \x01(\x03\x12\x1c\n\x14has_single_reference\x18\x05 \x01(\x08\x12\x0b\n\x03ptr\x18\x06 \x01(\x04\x42<\n\x18org.tensorflow.frameworkB\x1b\x41llocationDescriptionProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _ALLOCATIONDESCRIPTION = _descriptor.Descriptor(
30 | name='AllocationDescription',
31 | full_name='tensorflow.AllocationDescription',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='requested_bytes', full_name='tensorflow.AllocationDescription.requested_bytes', index=0,
38 | number=1, type=3, cpp_type=2, label=1,
39 | has_default_value=False, default_value=0,
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='allocated_bytes', full_name='tensorflow.AllocationDescription.allocated_bytes', index=1,
45 | number=2, type=3, cpp_type=2, label=1,
46 | has_default_value=False, default_value=0,
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | _descriptor.FieldDescriptor(
51 | name='allocator_name', full_name='tensorflow.AllocationDescription.allocator_name', index=2,
52 | number=3, type=9, cpp_type=9, label=1,
53 | has_default_value=False, default_value=_b("").decode('utf-8'),
54 | message_type=None, enum_type=None, containing_type=None,
55 | is_extension=False, extension_scope=None,
56 | options=None),
57 | _descriptor.FieldDescriptor(
58 | name='allocation_id', full_name='tensorflow.AllocationDescription.allocation_id', index=3,
59 | number=4, type=3, cpp_type=2, label=1,
60 | has_default_value=False, default_value=0,
61 | message_type=None, enum_type=None, containing_type=None,
62 | is_extension=False, extension_scope=None,
63 | options=None),
64 | _descriptor.FieldDescriptor(
65 | name='has_single_reference', full_name='tensorflow.AllocationDescription.has_single_reference', index=4,
66 | number=5, type=8, cpp_type=7, label=1,
67 | has_default_value=False, default_value=False,
68 | message_type=None, enum_type=None, containing_type=None,
69 | is_extension=False, extension_scope=None,
70 | options=None),
71 | _descriptor.FieldDescriptor(
72 | name='ptr', full_name='tensorflow.AllocationDescription.ptr', index=5,
73 | number=6, type=4, cpp_type=4, label=1,
74 | has_default_value=False, default_value=0,
75 | message_type=None, enum_type=None, containing_type=None,
76 | is_extension=False, extension_scope=None,
77 | options=None),
78 | ],
79 | extensions=[
80 | ],
81 | nested_types=[],
82 | enum_types=[
83 | ],
84 | options=None,
85 | is_extendable=False,
86 | syntax='proto3',
87 | extension_ranges=[],
88 | oneofs=[
89 | ],
90 | serialized_start=71,
91 | serialized_end=234,
92 | )
93 |
94 | DESCRIPTOR.message_types_by_name['AllocationDescription'] = _ALLOCATIONDESCRIPTION
95 |
96 | AllocationDescription = _reflection.GeneratedProtocolMessageType('AllocationDescription', (_message.Message,), dict(
97 | DESCRIPTOR = _ALLOCATIONDESCRIPTION,
98 | __module__ = 'tensorflow.core.framework.allocation_description_pb2'
99 | # @@protoc_insertion_point(class_scope:tensorflow.AllocationDescription)
100 | ))
101 | _sym_db.RegisterMessage(AllocationDescription)
102 |
103 |
104 | DESCRIPTOR.has_options = True
105 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\033AllocationDescriptionProtosP\001\370\001\001'))
106 | # @@protoc_insertion_point(module_scope)
107 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/tensorflow_server_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/tensorflow_server.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..protobuf import config_pb2 as tensorflow_dot_core_dot_protobuf_dot_config__pb2
17 | from ..protobuf import cluster_pb2 as tensorflow_dot_core_dot_protobuf_dot_cluster__pb2
18 |
19 |
20 | DESCRIPTOR = _descriptor.FileDescriptor(
21 | name='tensorflow/core/protobuf/tensorflow_server.proto',
22 | package='tensorflow',
23 | syntax='proto3',
24 | serialized_pb=_b('\n0tensorflow/core/protobuf/tensorflow_server.proto\x12\ntensorflow\x1a%tensorflow/core/protobuf/config.proto\x1a&tensorflow/core/protobuf/cluster.proto\"\xa5\x01\n\tServerDef\x12\'\n\x07\x63luster\x18\x01 \x01(\x0b\x32\x16.tensorflow.ClusterDef\x12\x10\n\x08job_name\x18\x02 \x01(\t\x12\x12\n\ntask_index\x18\x03 \x01(\x05\x12\x37\n\x16\x64\x65\x66\x61ult_session_config\x18\x04 \x01(\x0b\x32\x17.tensorflow.ConfigProto\x12\x10\n\x08protocol\x18\x05 \x01(\tB/\n\x1aorg.tensorflow.distruntimeB\x0cServerProtosP\x01\xf8\x01\x01\x62\x06proto3')
25 | ,
26 | dependencies=[tensorflow_dot_core_dot_protobuf_dot_config__pb2.DESCRIPTOR,tensorflow_dot_core_dot_protobuf_dot_cluster__pb2.DESCRIPTOR,])
27 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
28 |
29 |
30 |
31 |
32 | _SERVERDEF = _descriptor.Descriptor(
33 | name='ServerDef',
34 | full_name='tensorflow.ServerDef',
35 | filename=None,
36 | file=DESCRIPTOR,
37 | containing_type=None,
38 | fields=[
39 | _descriptor.FieldDescriptor(
40 | name='cluster', full_name='tensorflow.ServerDef.cluster', index=0,
41 | number=1, type=11, cpp_type=10, label=1,
42 | has_default_value=False, default_value=None,
43 | message_type=None, enum_type=None, containing_type=None,
44 | is_extension=False, extension_scope=None,
45 | options=None),
46 | _descriptor.FieldDescriptor(
47 | name='job_name', full_name='tensorflow.ServerDef.job_name', index=1,
48 | number=2, type=9, cpp_type=9, label=1,
49 | has_default_value=False, default_value=_b("").decode('utf-8'),
50 | message_type=None, enum_type=None, containing_type=None,
51 | is_extension=False, extension_scope=None,
52 | options=None),
53 | _descriptor.FieldDescriptor(
54 | name='task_index', full_name='tensorflow.ServerDef.task_index', index=2,
55 | number=3, type=5, cpp_type=1, label=1,
56 | has_default_value=False, default_value=0,
57 | message_type=None, enum_type=None, containing_type=None,
58 | is_extension=False, extension_scope=None,
59 | options=None),
60 | _descriptor.FieldDescriptor(
61 | name='default_session_config', full_name='tensorflow.ServerDef.default_session_config', index=3,
62 | number=4, type=11, cpp_type=10, label=1,
63 | has_default_value=False, default_value=None,
64 | message_type=None, enum_type=None, containing_type=None,
65 | is_extension=False, extension_scope=None,
66 | options=None),
67 | _descriptor.FieldDescriptor(
68 | name='protocol', full_name='tensorflow.ServerDef.protocol', index=4,
69 | number=5, type=9, cpp_type=9, label=1,
70 | has_default_value=False, default_value=_b("").decode('utf-8'),
71 | message_type=None, enum_type=None, containing_type=None,
72 | is_extension=False, extension_scope=None,
73 | options=None),
74 | ],
75 | extensions=[
76 | ],
77 | nested_types=[],
78 | enum_types=[
79 | ],
80 | options=None,
81 | is_extendable=False,
82 | syntax='proto3',
83 | extension_ranges=[],
84 | oneofs=[
85 | ],
86 | serialized_start=144,
87 | serialized_end=309,
88 | )
89 |
90 | _SERVERDEF.fields_by_name['cluster'].message_type = tensorflow_dot_core_dot_protobuf_dot_cluster__pb2._CLUSTERDEF
91 | _SERVERDEF.fields_by_name['default_session_config'].message_type = tensorflow_dot_core_dot_protobuf_dot_config__pb2._CONFIGPROTO
92 | DESCRIPTOR.message_types_by_name['ServerDef'] = _SERVERDEF
93 |
94 | ServerDef = _reflection.GeneratedProtocolMessageType('ServerDef', (_message.Message,), dict(
95 | DESCRIPTOR = _SERVERDEF,
96 | __module__ = 'tensorflow.core.protobuf.tensorflow_server_pb2'
97 | # @@protoc_insertion_point(class_scope:tensorflow.ServerDef)
98 | ))
99 | _sym_db.RegisterMessage(ServerDef)
100 |
101 |
102 | DESCRIPTOR.has_options = True
103 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032org.tensorflow.distruntimeB\014ServerProtosP\001\370\001\001'))
104 | # @@protoc_insertion_point(module_scope)
105 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/graph_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/graph.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..framework import node_def_pb2 as tensorflow_dot_core_dot_framework_dot_node__def__pb2
17 | from ..framework import function_pb2 as tensorflow_dot_core_dot_framework_dot_function__pb2
18 | from ..framework import versions_pb2 as tensorflow_dot_core_dot_framework_dot_versions__pb2
19 |
20 |
21 | DESCRIPTOR = _descriptor.FileDescriptor(
22 | name='tensorflow/core/framework/graph.proto',
23 | package='tensorflow',
24 | syntax='proto3',
25 | serialized_pb=_b('\n%tensorflow/core/framework/graph.proto\x12\ntensorflow\x1a(tensorflow/core/framework/node_def.proto\x1a(tensorflow/core/framework/function.proto\x1a(tensorflow/core/framework/versions.proto\"\x9d\x01\n\x08GraphDef\x12!\n\x04node\x18\x01 \x03(\x0b\x32\x13.tensorflow.NodeDef\x12(\n\x08versions\x18\x04 \x01(\x0b\x32\x16.tensorflow.VersionDef\x12\x13\n\x07version\x18\x03 \x01(\x05\x42\x02\x18\x01\x12/\n\x07library\x18\x02 \x01(\x0b\x32\x1e.tensorflow.FunctionDefLibraryB,\n\x18org.tensorflow.frameworkB\x0bGraphProtosP\x01\xf8\x01\x01\x62\x06proto3')
26 | ,
27 | dependencies=[tensorflow_dot_core_dot_framework_dot_node__def__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_function__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_versions__pb2.DESCRIPTOR,])
28 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
29 |
30 |
31 |
32 |
33 | _GRAPHDEF = _descriptor.Descriptor(
34 | name='GraphDef',
35 | full_name='tensorflow.GraphDef',
36 | filename=None,
37 | file=DESCRIPTOR,
38 | containing_type=None,
39 | fields=[
40 | _descriptor.FieldDescriptor(
41 | name='node', full_name='tensorflow.GraphDef.node', index=0,
42 | number=1, type=11, cpp_type=10, label=3,
43 | has_default_value=False, default_value=[],
44 | message_type=None, enum_type=None, containing_type=None,
45 | is_extension=False, extension_scope=None,
46 | options=None),
47 | _descriptor.FieldDescriptor(
48 | name='versions', full_name='tensorflow.GraphDef.versions', index=1,
49 | number=4, type=11, cpp_type=10, label=1,
50 | has_default_value=False, default_value=None,
51 | message_type=None, enum_type=None, containing_type=None,
52 | is_extension=False, extension_scope=None,
53 | options=None),
54 | _descriptor.FieldDescriptor(
55 | name='version', full_name='tensorflow.GraphDef.version', index=2,
56 | number=3, type=5, cpp_type=1, label=1,
57 | has_default_value=False, default_value=0,
58 | message_type=None, enum_type=None, containing_type=None,
59 | is_extension=False, extension_scope=None,
60 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))),
61 | _descriptor.FieldDescriptor(
62 | name='library', full_name='tensorflow.GraphDef.library', index=3,
63 | number=2, type=11, cpp_type=10, label=1,
64 | has_default_value=False, default_value=None,
65 | message_type=None, enum_type=None, containing_type=None,
66 | is_extension=False, extension_scope=None,
67 | options=None),
68 | ],
69 | extensions=[
70 | ],
71 | nested_types=[],
72 | enum_types=[
73 | ],
74 | options=None,
75 | is_extendable=False,
76 | syntax='proto3',
77 | extension_ranges=[],
78 | oneofs=[
79 | ],
80 | serialized_start=180,
81 | serialized_end=337,
82 | )
83 |
84 | _GRAPHDEF.fields_by_name['node'].message_type = tensorflow_dot_core_dot_framework_dot_node__def__pb2._NODEDEF
85 | _GRAPHDEF.fields_by_name['versions'].message_type = tensorflow_dot_core_dot_framework_dot_versions__pb2._VERSIONDEF
86 | _GRAPHDEF.fields_by_name['library'].message_type = tensorflow_dot_core_dot_framework_dot_function__pb2._FUNCTIONDEFLIBRARY
87 | DESCRIPTOR.message_types_by_name['GraphDef'] = _GRAPHDEF
88 |
89 | GraphDef = _reflection.GeneratedProtocolMessageType('GraphDef', (_message.Message,), dict(
90 | DESCRIPTOR = _GRAPHDEF,
91 | __module__ = 'tensorflow.core.framework.graph_pb2'
92 | # @@protoc_insertion_point(class_scope:tensorflow.GraphDef)
93 | ))
94 | _sym_db.RegisterMessage(GraphDef)
95 |
96 |
97 | DESCRIPTOR.has_options = True
98 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\013GraphProtosP\001\370\001\001'))
99 | _GRAPHDEF.fields_by_name['version'].has_options = True
100 | _GRAPHDEF.fields_by_name['version']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))
101 | # @@protoc_insertion_point(module_scope)
102 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/tensor_slice_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/tensor_slice.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/tensor_slice.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n,tensorflow/core/framework/tensor_slice.proto\x12\ntensorflow\"\x80\x01\n\x10TensorSliceProto\x12\x33\n\x06\x65xtent\x18\x01 \x03(\x0b\x32#.tensorflow.TensorSliceProto.Extent\x1a\x37\n\x06\x45xtent\x12\r\n\x05start\x18\x01 \x01(\x03\x12\x10\n\x06length\x18\x02 \x01(\x03H\x00\x42\x0c\n\nhas_lengthB2\n\x18org.tensorflow.frameworkB\x11TensorSliceProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _TENSORSLICEPROTO_EXTENT = _descriptor.Descriptor(
30 | name='Extent',
31 | full_name='tensorflow.TensorSliceProto.Extent',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='start', full_name='tensorflow.TensorSliceProto.Extent.start', index=0,
38 | number=1, type=3, cpp_type=2, label=1,
39 | has_default_value=False, default_value=0,
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='length', full_name='tensorflow.TensorSliceProto.Extent.length', index=1,
45 | number=2, type=3, cpp_type=2, label=1,
46 | has_default_value=False, default_value=0,
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | ],
51 | extensions=[
52 | ],
53 | nested_types=[],
54 | enum_types=[
55 | ],
56 | options=None,
57 | is_extendable=False,
58 | syntax='proto3',
59 | extension_ranges=[],
60 | oneofs=[
61 | _descriptor.OneofDescriptor(
62 | name='has_length', full_name='tensorflow.TensorSliceProto.Extent.has_length',
63 | index=0, containing_type=None, fields=[]),
64 | ],
65 | serialized_start=134,
66 | serialized_end=189,
67 | )
68 |
69 | _TENSORSLICEPROTO = _descriptor.Descriptor(
70 | name='TensorSliceProto',
71 | full_name='tensorflow.TensorSliceProto',
72 | filename=None,
73 | file=DESCRIPTOR,
74 | containing_type=None,
75 | fields=[
76 | _descriptor.FieldDescriptor(
77 | name='extent', full_name='tensorflow.TensorSliceProto.extent', index=0,
78 | number=1, type=11, cpp_type=10, label=3,
79 | has_default_value=False, default_value=[],
80 | message_type=None, enum_type=None, containing_type=None,
81 | is_extension=False, extension_scope=None,
82 | options=None),
83 | ],
84 | extensions=[
85 | ],
86 | nested_types=[_TENSORSLICEPROTO_EXTENT, ],
87 | enum_types=[
88 | ],
89 | options=None,
90 | is_extendable=False,
91 | syntax='proto3',
92 | extension_ranges=[],
93 | oneofs=[
94 | ],
95 | serialized_start=61,
96 | serialized_end=189,
97 | )
98 |
99 | _TENSORSLICEPROTO_EXTENT.containing_type = _TENSORSLICEPROTO
100 | _TENSORSLICEPROTO_EXTENT.oneofs_by_name['has_length'].fields.append(
101 | _TENSORSLICEPROTO_EXTENT.fields_by_name['length'])
102 | _TENSORSLICEPROTO_EXTENT.fields_by_name['length'].containing_oneof = _TENSORSLICEPROTO_EXTENT.oneofs_by_name['has_length']
103 | _TENSORSLICEPROTO.fields_by_name['extent'].message_type = _TENSORSLICEPROTO_EXTENT
104 | DESCRIPTOR.message_types_by_name['TensorSliceProto'] = _TENSORSLICEPROTO
105 |
106 | TensorSliceProto = _reflection.GeneratedProtocolMessageType('TensorSliceProto', (_message.Message,), dict(
107 |
108 | Extent = _reflection.GeneratedProtocolMessageType('Extent', (_message.Message,), dict(
109 | DESCRIPTOR = _TENSORSLICEPROTO_EXTENT,
110 | __module__ = 'tensorflow.core.framework.tensor_slice_pb2'
111 | # @@protoc_insertion_point(class_scope:tensorflow.TensorSliceProto.Extent)
112 | ))
113 | ,
114 | DESCRIPTOR = _TENSORSLICEPROTO,
115 | __module__ = 'tensorflow.core.framework.tensor_slice_pb2'
116 | # @@protoc_insertion_point(class_scope:tensorflow.TensorSliceProto)
117 | ))
118 | _sym_db.RegisterMessage(TensorSliceProto)
119 | _sym_db.RegisterMessage(TensorSliceProto.Extent)
120 |
121 |
122 | DESCRIPTOR.has_options = True
123 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\021TensorSliceProtosP\001\370\001\001'))
124 | # @@protoc_insertion_point(module_scope)
125 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/lib/core/error_codes_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/lib/core/error_codes.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf.internal import enum_type_wrapper
7 | from google.protobuf import descriptor as _descriptor
8 | from google.protobuf import message as _message
9 | from google.protobuf import reflection as _reflection
10 | from google.protobuf import symbol_database as _symbol_database
11 | from google.protobuf import descriptor_pb2
12 | # @@protoc_insertion_point(imports)
13 |
14 | _sym_db = _symbol_database.Default()
15 |
16 |
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/lib/core/error_codes.proto',
21 | package='tensorflow.error',
22 | syntax='proto3',
23 | serialized_pb=_b('\n*tensorflow/core/lib/core/error_codes.proto\x12\x10tensorflow.error*\x84\x03\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\r\n\tCANCELLED\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x14\n\x10INVALID_ARGUMENT\x10\x03\x12\x15\n\x11\x44\x45\x41\x44LINE_EXCEEDED\x10\x04\x12\r\n\tNOT_FOUND\x10\x05\x12\x12\n\x0e\x41LREADY_EXISTS\x10\x06\x12\x15\n\x11PERMISSION_DENIED\x10\x07\x12\x13\n\x0fUNAUTHENTICATED\x10\x10\x12\x16\n\x12RESOURCE_EXHAUSTED\x10\x08\x12\x17\n\x13\x46\x41ILED_PRECONDITION\x10\t\x12\x0b\n\x07\x41\x42ORTED\x10\n\x12\x10\n\x0cOUT_OF_RANGE\x10\x0b\x12\x11\n\rUNIMPLEMENTED\x10\x0c\x12\x0c\n\x08INTERNAL\x10\r\x12\x0f\n\x0bUNAVAILABLE\x10\x0e\x12\r\n\tDATA_LOSS\x10\x0f\x12K\nGDO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_\x10\x14\x42\x31\n\x18org.tensorflow.frameworkB\x10\x45rrorCodesProtosP\x01\xf8\x01\x01\x62\x06proto3')
24 | )
25 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
26 |
27 | _CODE = _descriptor.EnumDescriptor(
28 | name='Code',
29 | full_name='tensorflow.error.Code',
30 | filename=None,
31 | file=DESCRIPTOR,
32 | values=[
33 | _descriptor.EnumValueDescriptor(
34 | name='OK', index=0, number=0,
35 | options=None,
36 | type=None),
37 | _descriptor.EnumValueDescriptor(
38 | name='CANCELLED', index=1, number=1,
39 | options=None,
40 | type=None),
41 | _descriptor.EnumValueDescriptor(
42 | name='UNKNOWN', index=2, number=2,
43 | options=None,
44 | type=None),
45 | _descriptor.EnumValueDescriptor(
46 | name='INVALID_ARGUMENT', index=3, number=3,
47 | options=None,
48 | type=None),
49 | _descriptor.EnumValueDescriptor(
50 | name='DEADLINE_EXCEEDED', index=4, number=4,
51 | options=None,
52 | type=None),
53 | _descriptor.EnumValueDescriptor(
54 | name='NOT_FOUND', index=5, number=5,
55 | options=None,
56 | type=None),
57 | _descriptor.EnumValueDescriptor(
58 | name='ALREADY_EXISTS', index=6, number=6,
59 | options=None,
60 | type=None),
61 | _descriptor.EnumValueDescriptor(
62 | name='PERMISSION_DENIED', index=7, number=7,
63 | options=None,
64 | type=None),
65 | _descriptor.EnumValueDescriptor(
66 | name='UNAUTHENTICATED', index=8, number=16,
67 | options=None,
68 | type=None),
69 | _descriptor.EnumValueDescriptor(
70 | name='RESOURCE_EXHAUSTED', index=9, number=8,
71 | options=None,
72 | type=None),
73 | _descriptor.EnumValueDescriptor(
74 | name='FAILED_PRECONDITION', index=10, number=9,
75 | options=None,
76 | type=None),
77 | _descriptor.EnumValueDescriptor(
78 | name='ABORTED', index=11, number=10,
79 | options=None,
80 | type=None),
81 | _descriptor.EnumValueDescriptor(
82 | name='OUT_OF_RANGE', index=12, number=11,
83 | options=None,
84 | type=None),
85 | _descriptor.EnumValueDescriptor(
86 | name='UNIMPLEMENTED', index=13, number=12,
87 | options=None,
88 | type=None),
89 | _descriptor.EnumValueDescriptor(
90 | name='INTERNAL', index=14, number=13,
91 | options=None,
92 | type=None),
93 | _descriptor.EnumValueDescriptor(
94 | name='UNAVAILABLE', index=15, number=14,
95 | options=None,
96 | type=None),
97 | _descriptor.EnumValueDescriptor(
98 | name='DATA_LOSS', index=16, number=15,
99 | options=None,
100 | type=None),
101 | _descriptor.EnumValueDescriptor(
102 | name='DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_', index=17, number=20,
103 | options=None,
104 | type=None),
105 | ],
106 | containing_type=None,
107 | options=None,
108 | serialized_start=65,
109 | serialized_end=453,
110 | )
111 | _sym_db.RegisterEnumDescriptor(_CODE)
112 |
113 | Code = enum_type_wrapper.EnumTypeWrapper(_CODE)
114 | OK = 0
115 | CANCELLED = 1
116 | UNKNOWN = 2
117 | INVALID_ARGUMENT = 3
118 | DEADLINE_EXCEEDED = 4
119 | NOT_FOUND = 5
120 | ALREADY_EXISTS = 6
121 | PERMISSION_DENIED = 7
122 | UNAUTHENTICATED = 16
123 | RESOURCE_EXHAUSTED = 8
124 | FAILED_PRECONDITION = 9
125 | ABORTED = 10
126 | OUT_OF_RANGE = 11
127 | UNIMPLEMENTED = 12
128 | INTERNAL = 13
129 | UNAVAILABLE = 14
130 | DATA_LOSS = 15
131 | DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_ = 20
132 |
133 |
134 | DESCRIPTOR.enum_types_by_name['Code'] = _CODE
135 |
136 |
137 | DESCRIPTOR.has_options = True
138 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\020ErrorCodesProtosP\001\370\001\001'))
139 | # @@protoc_insertion_point(module_scope)
140 |
--------------------------------------------------------------------------------
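
error_codes_pb2 wraps the tensorflow.error.Code enum in protobuf's EnumTypeWrapper, so names and numeric values can be translated in both directions. A small sketch, not part of the generated file and assuming this repository's package layout:

from amlrealtimeai.external.tensorflow.core.lib.core import error_codes_pb2

# Map between symbolic names and numeric status codes.
assert error_codes_pb2.Code.Name(error_codes_pb2.NOT_FOUND) == "NOT_FOUND"
assert error_codes_pb2.Code.Value("DEADLINE_EXCEEDED") == error_codes_pb2.DEADLINE_EXCEEDED == 4
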
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/saver_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/saver.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/protobuf/saver.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n$tensorflow/core/protobuf/saver.proto\x12\ntensorflow\"\x9e\x02\n\x08SaverDef\x12\x1c\n\x14\x66ilename_tensor_name\x18\x01 \x01(\t\x12\x18\n\x10save_tensor_name\x18\x02 \x01(\t\x12\x17\n\x0frestore_op_name\x18\x03 \x01(\t\x12\x13\n\x0bmax_to_keep\x18\x04 \x01(\x05\x12\x0f\n\x07sharded\x18\x05 \x01(\x08\x12%\n\x1dkeep_checkpoint_every_n_hours\x18\x06 \x01(\x02\x12=\n\x07version\x18\x07 \x01(\x0e\x32,.tensorflow.SaverDef.CheckpointFormatVersion\"5\n\x17\x43heckpointFormatVersion\x12\n\n\x06LEGACY\x10\x00\x12\x06\n\x02V1\x10\x01\x12\x06\n\x02V2\x10\x02\x42\'\n\x13org.tensorflow.utilB\x0bSaverProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 | _SAVERDEF_CHECKPOINTFORMATVERSION = _descriptor.EnumDescriptor(
29 | name='CheckpointFormatVersion',
30 | full_name='tensorflow.SaverDef.CheckpointFormatVersion',
31 | filename=None,
32 | file=DESCRIPTOR,
33 | values=[
34 | _descriptor.EnumValueDescriptor(
35 | name='LEGACY', index=0, number=0,
36 | options=None,
37 | type=None),
38 | _descriptor.EnumValueDescriptor(
39 | name='V1', index=1, number=1,
40 | options=None,
41 | type=None),
42 | _descriptor.EnumValueDescriptor(
43 | name='V2', index=2, number=2,
44 | options=None,
45 | type=None),
46 | ],
47 | containing_type=None,
48 | options=None,
49 | serialized_start=286,
50 | serialized_end=339,
51 | )
52 | _sym_db.RegisterEnumDescriptor(_SAVERDEF_CHECKPOINTFORMATVERSION)
53 |
54 |
55 | _SAVERDEF = _descriptor.Descriptor(
56 | name='SaverDef',
57 | full_name='tensorflow.SaverDef',
58 | filename=None,
59 | file=DESCRIPTOR,
60 | containing_type=None,
61 | fields=[
62 | _descriptor.FieldDescriptor(
63 | name='filename_tensor_name', full_name='tensorflow.SaverDef.filename_tensor_name', index=0,
64 | number=1, type=9, cpp_type=9, label=1,
65 | has_default_value=False, default_value=_b("").decode('utf-8'),
66 | message_type=None, enum_type=None, containing_type=None,
67 | is_extension=False, extension_scope=None,
68 | options=None),
69 | _descriptor.FieldDescriptor(
70 | name='save_tensor_name', full_name='tensorflow.SaverDef.save_tensor_name', index=1,
71 | number=2, type=9, cpp_type=9, label=1,
72 | has_default_value=False, default_value=_b("").decode('utf-8'),
73 | message_type=None, enum_type=None, containing_type=None,
74 | is_extension=False, extension_scope=None,
75 | options=None),
76 | _descriptor.FieldDescriptor(
77 | name='restore_op_name', full_name='tensorflow.SaverDef.restore_op_name', index=2,
78 | number=3, type=9, cpp_type=9, label=1,
79 | has_default_value=False, default_value=_b("").decode('utf-8'),
80 | message_type=None, enum_type=None, containing_type=None,
81 | is_extension=False, extension_scope=None,
82 | options=None),
83 | _descriptor.FieldDescriptor(
84 | name='max_to_keep', full_name='tensorflow.SaverDef.max_to_keep', index=3,
85 | number=4, type=5, cpp_type=1, label=1,
86 | has_default_value=False, default_value=0,
87 | message_type=None, enum_type=None, containing_type=None,
88 | is_extension=False, extension_scope=None,
89 | options=None),
90 | _descriptor.FieldDescriptor(
91 | name='sharded', full_name='tensorflow.SaverDef.sharded', index=4,
92 | number=5, type=8, cpp_type=7, label=1,
93 | has_default_value=False, default_value=False,
94 | message_type=None, enum_type=None, containing_type=None,
95 | is_extension=False, extension_scope=None,
96 | options=None),
97 | _descriptor.FieldDescriptor(
98 | name='keep_checkpoint_every_n_hours', full_name='tensorflow.SaverDef.keep_checkpoint_every_n_hours', index=5,
99 | number=6, type=2, cpp_type=6, label=1,
100 | has_default_value=False, default_value=float(0),
101 | message_type=None, enum_type=None, containing_type=None,
102 | is_extension=False, extension_scope=None,
103 | options=None),
104 | _descriptor.FieldDescriptor(
105 | name='version', full_name='tensorflow.SaverDef.version', index=6,
106 | number=7, type=14, cpp_type=8, label=1,
107 | has_default_value=False, default_value=0,
108 | message_type=None, enum_type=None, containing_type=None,
109 | is_extension=False, extension_scope=None,
110 | options=None),
111 | ],
112 | extensions=[
113 | ],
114 | nested_types=[],
115 | enum_types=[
116 | _SAVERDEF_CHECKPOINTFORMATVERSION,
117 | ],
118 | options=None,
119 | is_extendable=False,
120 | syntax='proto3',
121 | extension_ranges=[],
122 | oneofs=[
123 | ],
124 | serialized_start=53,
125 | serialized_end=339,
126 | )
127 |
128 | _SAVERDEF.fields_by_name['version'].enum_type = _SAVERDEF_CHECKPOINTFORMATVERSION
129 | _SAVERDEF_CHECKPOINTFORMATVERSION.containing_type = _SAVERDEF
130 | DESCRIPTOR.message_types_by_name['SaverDef'] = _SAVERDEF
131 |
132 | SaverDef = _reflection.GeneratedProtocolMessageType('SaverDef', (_message.Message,), dict(
133 | DESCRIPTOR = _SAVERDEF,
134 | __module__ = 'tensorflow.core.protobuf.saver_pb2'
135 | # @@protoc_insertion_point(class_scope:tensorflow.SaverDef)
136 | ))
137 | _sym_db.RegisterMessage(SaverDef)
138 |
139 |
140 | DESCRIPTOR.has_options = True
141 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023org.tensorflow.utilB\013SaverProtosP\001\370\001\001'))
142 | # @@protoc_insertion_point(module_scope)
143 |
--------------------------------------------------------------------------------
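
saver_pb2 defines SaverDef together with its nested CheckpointFormatVersion enum. A minimal sketch, not part of the generated file; the tensor and op names below are illustrative placeholders, and the import path assumes this repository's layout:

from amlrealtimeai.external.tensorflow.core.protobuf import saver_pb2

saver = saver_pb2.SaverDef(
    filename_tensor_name="save/Const:0",           # placeholder names
    save_tensor_name="save/control_dependency:0",
    restore_op_name="save/restore_all",
    max_to_keep=5,
    version=saver_pb2.SaverDef.V2,                 # checkpoint format V2
)
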
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/device_attributes_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/device_attributes.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/device_attributes.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n1tensorflow/core/framework/device_attributes.proto\x12\ntensorflow\" \n\x0e\x44\x65viceLocality\x12\x0e\n\x06\x62us_id\x18\x01 \x01(\x05\"\xac\x01\n\x10\x44\x65viceAttributes\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12\x14\n\x0cmemory_limit\x18\x04 \x01(\x03\x12,\n\x08locality\x18\x05 \x01(\x0b\x32\x1a.tensorflow.DeviceLocality\x12\x13\n\x0bincarnation\x18\x06 \x01(\x06\x12\x1c\n\x14physical_device_desc\x18\x07 \x01(\tB7\n\x18org.tensorflow.frameworkB\x16\x44\x65viceAttributesProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _DEVICELOCALITY = _descriptor.Descriptor(
30 | name='DeviceLocality',
31 | full_name='tensorflow.DeviceLocality',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='bus_id', full_name='tensorflow.DeviceLocality.bus_id', index=0,
38 | number=1, type=5, cpp_type=1, label=1,
39 | has_default_value=False, default_value=0,
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | ],
44 | extensions=[
45 | ],
46 | nested_types=[],
47 | enum_types=[
48 | ],
49 | options=None,
50 | is_extendable=False,
51 | syntax='proto3',
52 | extension_ranges=[],
53 | oneofs=[
54 | ],
55 | serialized_start=65,
56 | serialized_end=97,
57 | )
58 |
59 |
60 | _DEVICEATTRIBUTES = _descriptor.Descriptor(
61 | name='DeviceAttributes',
62 | full_name='tensorflow.DeviceAttributes',
63 | filename=None,
64 | file=DESCRIPTOR,
65 | containing_type=None,
66 | fields=[
67 | _descriptor.FieldDescriptor(
68 | name='name', full_name='tensorflow.DeviceAttributes.name', index=0,
69 | number=1, type=9, cpp_type=9, label=1,
70 | has_default_value=False, default_value=_b("").decode('utf-8'),
71 | message_type=None, enum_type=None, containing_type=None,
72 | is_extension=False, extension_scope=None,
73 | options=None),
74 | _descriptor.FieldDescriptor(
75 | name='device_type', full_name='tensorflow.DeviceAttributes.device_type', index=1,
76 | number=2, type=9, cpp_type=9, label=1,
77 | has_default_value=False, default_value=_b("").decode('utf-8'),
78 | message_type=None, enum_type=None, containing_type=None,
79 | is_extension=False, extension_scope=None,
80 | options=None),
81 | _descriptor.FieldDescriptor(
82 | name='memory_limit', full_name='tensorflow.DeviceAttributes.memory_limit', index=2,
83 | number=4, type=3, cpp_type=2, label=1,
84 | has_default_value=False, default_value=0,
85 | message_type=None, enum_type=None, containing_type=None,
86 | is_extension=False, extension_scope=None,
87 | options=None),
88 | _descriptor.FieldDescriptor(
89 | name='locality', full_name='tensorflow.DeviceAttributes.locality', index=3,
90 | number=5, type=11, cpp_type=10, label=1,
91 | has_default_value=False, default_value=None,
92 | message_type=None, enum_type=None, containing_type=None,
93 | is_extension=False, extension_scope=None,
94 | options=None),
95 | _descriptor.FieldDescriptor(
96 | name='incarnation', full_name='tensorflow.DeviceAttributes.incarnation', index=4,
97 | number=6, type=6, cpp_type=4, label=1,
98 | has_default_value=False, default_value=0,
99 | message_type=None, enum_type=None, containing_type=None,
100 | is_extension=False, extension_scope=None,
101 | options=None),
102 | _descriptor.FieldDescriptor(
103 | name='physical_device_desc', full_name='tensorflow.DeviceAttributes.physical_device_desc', index=5,
104 | number=7, type=9, cpp_type=9, label=1,
105 | has_default_value=False, default_value=_b("").decode('utf-8'),
106 | message_type=None, enum_type=None, containing_type=None,
107 | is_extension=False, extension_scope=None,
108 | options=None),
109 | ],
110 | extensions=[
111 | ],
112 | nested_types=[],
113 | enum_types=[
114 | ],
115 | options=None,
116 | is_extendable=False,
117 | syntax='proto3',
118 | extension_ranges=[],
119 | oneofs=[
120 | ],
121 | serialized_start=100,
122 | serialized_end=272,
123 | )
124 |
125 | _DEVICEATTRIBUTES.fields_by_name['locality'].message_type = _DEVICELOCALITY
126 | DESCRIPTOR.message_types_by_name['DeviceLocality'] = _DEVICELOCALITY
127 | DESCRIPTOR.message_types_by_name['DeviceAttributes'] = _DEVICEATTRIBUTES
128 |
129 | DeviceLocality = _reflection.GeneratedProtocolMessageType('DeviceLocality', (_message.Message,), dict(
130 | DESCRIPTOR = _DEVICELOCALITY,
131 | __module__ = 'tensorflow.core.framework.device_attributes_pb2'
132 | # @@protoc_insertion_point(class_scope:tensorflow.DeviceLocality)
133 | ))
134 | _sym_db.RegisterMessage(DeviceLocality)
135 |
136 | DeviceAttributes = _reflection.GeneratedProtocolMessageType('DeviceAttributes', (_message.Message,), dict(
137 | DESCRIPTOR = _DEVICEATTRIBUTES,
138 | __module__ = 'tensorflow.core.framework.device_attributes_pb2'
139 | # @@protoc_insertion_point(class_scope:tensorflow.DeviceAttributes)
140 | ))
141 | _sym_db.RegisterMessage(DeviceAttributes)
142 |
143 |
144 | DESCRIPTOR.has_options = True
145 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\026DeviceAttributesProtosP\001\370\001\001'))
146 | # @@protoc_insertion_point(module_scope)
147 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.suo
8 | *.user
9 | *.userosscache
10 | *.sln.docstates
11 |
12 | # User-specific files (MonoDevelop/Xamarin Studio)
13 | *.userprefs
14 |
15 | # Build results
16 | [Dd]ebug/
17 | [Dd]ebugPublic/
18 | [Rr]elease/
19 | [Rr]eleases/
20 | x64/
21 | x86/
22 | bld/
23 | [Bb]in/
24 | [Oo]bj/
25 | [Ll]og/
26 |
27 | # Visual Studio 2015 cache/options directory
28 | .vs/
29 | # Uncomment if you have tasks that create the project's static files in wwwroot
30 | #wwwroot/
31 |
32 | # MSTest test Results
33 | [Tt]est[Rr]esult*/
34 | [Bb]uild[Ll]og.*
35 |
36 | # NUNIT
37 | *.VisualState.xml
38 | TestResult.xml
39 |
40 | # Build Results of an ATL Project
41 | [Dd]ebugPS/
42 | [Rr]eleasePS/
43 | dlldata.c
44 |
45 | # .NET Core
46 | project.lock.json
47 | project.fragment.lock.json
48 | artifacts/
49 | **/Properties/launchSettings.json
50 |
51 | *_i.c
52 | *_p.c
53 | *_i.h
54 | *.ilk
55 | *.meta
56 | *.obj
57 | *.pch
58 | *.pdb
59 | *.pgc
60 | *.pgd
61 | *.rsp
62 | *.sbr
63 | *.tlb
64 | *.tli
65 | *.tlh
66 | *.tmp
67 | *.tmp_proj
68 | *.log
69 | *.vspscc
70 | *.vssscc
71 | .builds
72 | *.pidb
73 | *.svclog
74 | *.scc
75 |
76 | # Chutzpah Test files
77 | _Chutzpah*
78 |
79 | # Visual C++ cache files
80 | ipch/
81 | *.aps
82 | *.ncb
83 | *.opendb
84 | *.opensdf
85 | *.sdf
86 | *.cachefile
87 | *.VC.db
88 | *.VC.VC.opendb
89 |
90 | # Visual Studio profiler
91 | *.psess
92 | *.vsp
93 | *.vspx
94 | *.sap
95 |
96 | # TFS 2012 Local Workspace
97 | $tf/
98 |
99 | # Guidance Automation Toolkit
100 | *.gpState
101 |
102 | # ReSharper is a .NET coding add-in
103 | _ReSharper*/
104 | *.[Rr]e[Ss]harper
105 | *.DotSettings.user
106 |
107 | # JustCode is a .NET coding add-in
108 | .JustCode
109 |
110 | # TeamCity is a build add-in
111 | _TeamCity*
112 |
113 | # DotCover is a Code Coverage Tool
114 | *.dotCover
115 |
116 | # Visual Studio code coverage results
117 | *.coverage
118 | *.coveragexml
119 |
120 | # NCrunch
121 | _NCrunch_*
122 | .*crunch*.local.xml
123 | nCrunchTemp_*
124 |
125 | # MightyMoose
126 | *.mm.*
127 | AutoTest.Net/
128 |
129 | # Web workbench (sass)
130 | .sass-cache/
131 |
132 | # Installshield output folder
133 | [Ee]xpress/
134 |
135 | # DocProject is a documentation generator add-in
136 | DocProject/buildhelp/
137 | DocProject/Help/*.HxT
138 | DocProject/Help/*.HxC
139 | DocProject/Help/*.hhc
140 | DocProject/Help/*.hhk
141 | DocProject/Help/*.hhp
142 | DocProject/Help/Html2
143 | DocProject/Help/html
144 |
145 | # Click-Once directory
146 | publish/
147 |
148 | # Publish Web Output
149 | *.[Pp]ublish.xml
150 | *.azurePubxml
151 | # TODO: Comment the next line if you want to check in your web deploy settings
152 | # but database connection strings (with potential passwords) will be unencrypted
153 | *.pubxml
154 | *.publishproj
155 |
156 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
157 | # check in your Azure Web App publish settings, but sensitive information contained
158 | # in these scripts will be unencrypted
159 | PublishScripts/
160 |
161 | # NuGet Packages
162 | *.nupkg
163 | # The packages folder can be ignored because of Package Restore
164 | **/packages/*
165 | # except build/, which is used as an MSBuild target.
166 | !**/packages/build/
167 | # Uncomment if necessary; however, it will generally be regenerated when needed
168 | #!**/packages/repositories.config
169 | # NuGet v3's project.json files produces more ignorable files
170 | *.nuget.props
171 | *.nuget.targets
172 |
173 | # Microsoft Azure Build Output
174 | csx/
175 | *.build.csdef
176 |
177 | # Microsoft Azure Emulator
178 | ecf/
179 | rcf/
180 |
181 | # Windows Store app package directories and files
182 | AppPackages/
183 | BundleArtifacts/
184 | Package.StoreAssociation.xml
185 | _pkginfo.txt
186 |
187 | # Visual Studio cache files
188 | # files ending in .cache can be ignored
189 | *.[Cc]ache
190 | # but keep track of directories ending in .cache
191 | !*.[Cc]ache/
192 |
193 | # Others
194 | ClientBin/
195 | ~$*
196 | *~
197 | *.dbmdl
198 | *.dbproj.schemaview
199 | *.jfm
200 | *.pfx
201 | *.publishsettings
202 | orleans.codegen.cs
203 |
204 | # Since there are multiple workflows, uncomment next line to ignore bower_components
205 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
206 | #bower_components/
207 |
208 | # RIA/Silverlight projects
209 | Generated_Code/
210 |
211 | # Backup & report files from converting an old project file
212 | # to a newer Visual Studio version. Backup files are not needed,
213 | # because we have git ;-)
214 | _UpgradeReport_Files/
215 | Backup*/
216 | UpgradeLog*.XML
217 | UpgradeLog*.htm
218 |
219 | # SQL Server files
220 | *.mdf
221 | *.ldf
222 | *.ndf
223 |
224 | # Business Intelligence projects
225 | *.rdl.data
226 | *.bim.layout
227 | *.bim_*.settings
228 |
229 | # Microsoft Fakes
230 | FakesAssemblies/
231 |
232 | # GhostDoc plugin setting file
233 | *.GhostDoc.xml
234 |
235 | # Node.js Tools for Visual Studio
236 | .ntvs_analysis.dat
237 | node_modules/
238 |
239 | # Typescript v1 declaration files
240 | typings/
241 |
242 | # Visual Studio 6 build log
243 | *.plg
244 |
245 | # Visual Studio 6 workspace options file
246 | *.opt
247 |
248 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
249 | *.vbw
250 |
251 | # Visual Studio LightSwitch build output
252 | **/*.HTMLClient/GeneratedArtifacts
253 | **/*.DesktopClient/GeneratedArtifacts
254 | **/*.DesktopClient/ModelManifest.xml
255 | **/*.Server/GeneratedArtifacts
256 | **/*.Server/ModelManifest.xml
257 | _Pvt_Extensions
258 |
259 | # Paket dependency manager
260 | .paket/paket.exe
261 | paket-files/
262 |
263 | # FAKE - F# Make
264 | .fake/
265 |
266 | # JetBrains Rider
267 | .idea/
268 | *.sln.iml
269 |
270 | # CodeRush
271 | .cr/
272 |
273 | # Python Tools for Visual Studio (PTVS)
274 | __pycache__/
275 | *.pyc
276 |
277 | # Cake - Uncomment if you are using it
278 | # tools/**
279 | # !tools/packages.config
280 |
281 | # Telerik's JustMock configuration file
282 | *.jmconfig
283 |
284 | # BizTalk build output
285 | *.btp.cs
286 | *.btm.cs
287 | *.odx.cs
288 | *.xsd.cs
289 |
290 | .ipynb_checkpoints/
291 | .vscode
292 | .cache
293 | .pytest_cache
294 | *.egg-info
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/protobuf/cluster_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/protobuf/cluster.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/protobuf/cluster.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n&tensorflow/core/protobuf/cluster.proto\x12\ntensorflow\"r\n\x06JobDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05tasks\x18\x02 \x03(\x0b\x32\x1d.tensorflow.JobDef.TasksEntry\x1a,\n\nTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"-\n\nClusterDef\x12\x1f\n\x03job\x18\x01 \x03(\x0b\x32\x12.tensorflow.JobDefB0\n\x1aorg.tensorflow.distruntimeB\rClusterProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _JOBDEF_TASKSENTRY = _descriptor.Descriptor(
30 | name='TasksEntry',
31 | full_name='tensorflow.JobDef.TasksEntry',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='key', full_name='tensorflow.JobDef.TasksEntry.key', index=0,
38 | number=1, type=5, cpp_type=1, label=1,
39 | has_default_value=False, default_value=0,
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='value', full_name='tensorflow.JobDef.TasksEntry.value', index=1,
45 | number=2, type=9, cpp_type=9, label=1,
46 | has_default_value=False, default_value=_b("").decode('utf-8'),
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | ],
51 | extensions=[
52 | ],
53 | nested_types=[],
54 | enum_types=[
55 | ],
56 | options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
57 | is_extendable=False,
58 | syntax='proto3',
59 | extension_ranges=[],
60 | oneofs=[
61 | ],
62 | serialized_start=124,
63 | serialized_end=168,
64 | )
65 |
66 | _JOBDEF = _descriptor.Descriptor(
67 | name='JobDef',
68 | full_name='tensorflow.JobDef',
69 | filename=None,
70 | file=DESCRIPTOR,
71 | containing_type=None,
72 | fields=[
73 | _descriptor.FieldDescriptor(
74 | name='name', full_name='tensorflow.JobDef.name', index=0,
75 | number=1, type=9, cpp_type=9, label=1,
76 | has_default_value=False, default_value=_b("").decode('utf-8'),
77 | message_type=None, enum_type=None, containing_type=None,
78 | is_extension=False, extension_scope=None,
79 | options=None),
80 | _descriptor.FieldDescriptor(
81 | name='tasks', full_name='tensorflow.JobDef.tasks', index=1,
82 | number=2, type=11, cpp_type=10, label=3,
83 | has_default_value=False, default_value=[],
84 | message_type=None, enum_type=None, containing_type=None,
85 | is_extension=False, extension_scope=None,
86 | options=None),
87 | ],
88 | extensions=[
89 | ],
90 | nested_types=[_JOBDEF_TASKSENTRY, ],
91 | enum_types=[
92 | ],
93 | options=None,
94 | is_extendable=False,
95 | syntax='proto3',
96 | extension_ranges=[],
97 | oneofs=[
98 | ],
99 | serialized_start=54,
100 | serialized_end=168,
101 | )
102 |
103 |
104 | _CLUSTERDEF = _descriptor.Descriptor(
105 | name='ClusterDef',
106 | full_name='tensorflow.ClusterDef',
107 | filename=None,
108 | file=DESCRIPTOR,
109 | containing_type=None,
110 | fields=[
111 | _descriptor.FieldDescriptor(
112 | name='job', full_name='tensorflow.ClusterDef.job', index=0,
113 | number=1, type=11, cpp_type=10, label=3,
114 | has_default_value=False, default_value=[],
115 | message_type=None, enum_type=None, containing_type=None,
116 | is_extension=False, extension_scope=None,
117 | options=None),
118 | ],
119 | extensions=[
120 | ],
121 | nested_types=[],
122 | enum_types=[
123 | ],
124 | options=None,
125 | is_extendable=False,
126 | syntax='proto3',
127 | extension_ranges=[],
128 | oneofs=[
129 | ],
130 | serialized_start=170,
131 | serialized_end=215,
132 | )
133 |
134 | _JOBDEF_TASKSENTRY.containing_type = _JOBDEF
135 | _JOBDEF.fields_by_name['tasks'].message_type = _JOBDEF_TASKSENTRY
136 | _CLUSTERDEF.fields_by_name['job'].message_type = _JOBDEF
137 | DESCRIPTOR.message_types_by_name['JobDef'] = _JOBDEF
138 | DESCRIPTOR.message_types_by_name['ClusterDef'] = _CLUSTERDEF
139 |
140 | JobDef = _reflection.GeneratedProtocolMessageType('JobDef', (_message.Message,), dict(
141 |
142 | TasksEntry = _reflection.GeneratedProtocolMessageType('TasksEntry', (_message.Message,), dict(
143 | DESCRIPTOR = _JOBDEF_TASKSENTRY,
144 | __module__ = 'tensorflow.core.protobuf.cluster_pb2'
145 | # @@protoc_insertion_point(class_scope:tensorflow.JobDef.TasksEntry)
146 | ))
147 | ,
148 | DESCRIPTOR = _JOBDEF,
149 | __module__ = 'tensorflow.core.protobuf.cluster_pb2'
150 | # @@protoc_insertion_point(class_scope:tensorflow.JobDef)
151 | ))
152 | _sym_db.RegisterMessage(JobDef)
153 | _sym_db.RegisterMessage(JobDef.TasksEntry)
154 |
155 | ClusterDef = _reflection.GeneratedProtocolMessageType('ClusterDef', (_message.Message,), dict(
156 | DESCRIPTOR = _CLUSTERDEF,
157 | __module__ = 'tensorflow.core.protobuf.cluster_pb2'
158 | # @@protoc_insertion_point(class_scope:tensorflow.ClusterDef)
159 | ))
160 | _sym_db.RegisterMessage(ClusterDef)
161 |
162 |
163 | DESCRIPTOR.has_options = True
164 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032org.tensorflow.distruntimeB\rClusterProtosP\001\370\001\001'))
165 | _JOBDEF_TASKSENTRY.has_options = True
166 | _JOBDEF_TASKSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
167 | # @@protoc_insertion_point(module_scope)
168 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow_serving/apis/prediction_service_pb2_grpc.py:
--------------------------------------------------------------------------------
1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
2 | import grpc
3 |
4 | from ..apis import classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2
5 | from ..apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
6 | from ..apis import inference_pb2 as tensorflow__serving_dot_apis_dot_inference__pb2
7 | from ..apis import predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2
8 | from ..apis import regression_pb2 as tensorflow__serving_dot_apis_dot_regression__pb2
9 |
10 |
11 | class PredictionServiceStub(object):
12 | """open source marker; do not remove
13 | PredictionService provides access to machine-learned models loaded by
14 | model_servers.
15 | """
16 |
17 | def __init__(self, channel):
18 | """Constructor.
19 |
20 | Args:
21 | channel: A grpc.Channel.
22 | """
23 | self.Classify = channel.unary_unary(
24 | '/tensorflow.serving.PredictionService/Classify',
25 | request_serializer=tensorflow__serving_dot_apis_dot_classification__pb2.ClassificationRequest.SerializeToString,
26 | response_deserializer=tensorflow__serving_dot_apis_dot_classification__pb2.ClassificationResponse.FromString,
27 | )
28 | self.Regress = channel.unary_unary(
29 | '/tensorflow.serving.PredictionService/Regress',
30 | request_serializer=tensorflow__serving_dot_apis_dot_regression__pb2.RegressionRequest.SerializeToString,
31 | response_deserializer=tensorflow__serving_dot_apis_dot_regression__pb2.RegressionResponse.FromString,
32 | )
33 | self.Predict = channel.unary_unary(
34 | '/tensorflow.serving.PredictionService/Predict',
35 | request_serializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictRequest.SerializeToString,
36 | response_deserializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictResponse.FromString,
37 | )
38 | self.MultiInference = channel.unary_unary(
39 | '/tensorflow.serving.PredictionService/MultiInference',
40 | request_serializer=tensorflow__serving_dot_apis_dot_inference__pb2.MultiInferenceRequest.SerializeToString,
41 | response_deserializer=tensorflow__serving_dot_apis_dot_inference__pb2.MultiInferenceResponse.FromString,
42 | )
43 | self.GetModelMetadata = channel.unary_unary(
44 | '/tensorflow.serving.PredictionService/GetModelMetadata',
45 | request_serializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataRequest.SerializeToString,
46 | response_deserializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataResponse.FromString,
47 | )
48 |
49 |
50 | class PredictionServiceServicer(object):
51 | """open source marker; do not remove
52 | PredictionService provides access to machine-learned models loaded by
53 | model_servers.
54 | """
55 |
56 | def Classify(self, request, context):
57 | """Classify.
58 | """
59 | context.set_code(grpc.StatusCode.UNIMPLEMENTED)
60 | context.set_details('Method not implemented!')
61 | raise NotImplementedError('Method not implemented!')
62 |
63 | def Regress(self, request, context):
64 | """Regress.
65 | """
66 | context.set_code(grpc.StatusCode.UNIMPLEMENTED)
67 | context.set_details('Method not implemented!')
68 | raise NotImplementedError('Method not implemented!')
69 |
70 | def Predict(self, request, context):
71 | """Predict -- provides access to loaded TensorFlow model.
72 | """
73 | context.set_code(grpc.StatusCode.UNIMPLEMENTED)
74 | context.set_details('Method not implemented!')
75 | raise NotImplementedError('Method not implemented!')
76 |
77 | def MultiInference(self, request, context):
78 | """MultiInference API for multi-headed models.
79 | """
80 | context.set_code(grpc.StatusCode.UNIMPLEMENTED)
81 | context.set_details('Method not implemented!')
82 | raise NotImplementedError('Method not implemented!')
83 |
84 | def GetModelMetadata(self, request, context):
85 | """GetModelMetadata - provides access to metadata for loaded models.
86 | """
87 | context.set_code(grpc.StatusCode.UNIMPLEMENTED)
88 | context.set_details('Method not implemented!')
89 | raise NotImplementedError('Method not implemented!')
90 |
91 |
92 | def add_PredictionServiceServicer_to_server(servicer, server):
93 | rpc_method_handlers = {
94 | 'Classify': grpc.unary_unary_rpc_method_handler(
95 | servicer.Classify,
96 | request_deserializer=tensorflow__serving_dot_apis_dot_classification__pb2.ClassificationRequest.FromString,
97 | response_serializer=tensorflow__serving_dot_apis_dot_classification__pb2.ClassificationResponse.SerializeToString,
98 | ),
99 | 'Regress': grpc.unary_unary_rpc_method_handler(
100 | servicer.Regress,
101 | request_deserializer=tensorflow__serving_dot_apis_dot_regression__pb2.RegressionRequest.FromString,
102 | response_serializer=tensorflow__serving_dot_apis_dot_regression__pb2.RegressionResponse.SerializeToString,
103 | ),
104 | 'Predict': grpc.unary_unary_rpc_method_handler(
105 | servicer.Predict,
106 | request_deserializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictRequest.FromString,
107 | response_serializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictResponse.SerializeToString,
108 | ),
109 | 'MultiInference': grpc.unary_unary_rpc_method_handler(
110 | servicer.MultiInference,
111 | request_deserializer=tensorflow__serving_dot_apis_dot_inference__pb2.MultiInferenceRequest.FromString,
112 | response_serializer=tensorflow__serving_dot_apis_dot_inference__pb2.MultiInferenceResponse.SerializeToString,
113 | ),
114 | 'GetModelMetadata': grpc.unary_unary_rpc_method_handler(
115 | servicer.GetModelMetadata,
116 | request_deserializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataRequest.FromString,
117 | response_serializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataResponse.SerializeToString,
118 | ),
119 | }
120 | generic_handler = grpc.method_handlers_generic_handler(
121 | 'tensorflow.serving.PredictionService', rpc_method_handlers)
122 | server.add_generic_rpc_handlers((generic_handler,))
123 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/node_def_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/node_def.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..framework import attr_value_pb2 as tensorflow_dot_core_dot_framework_dot_attr__value__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/framework/node_def.proto',
21 | package='tensorflow',
22 | syntax='proto3',
23 | serialized_pb=_b('\n(tensorflow/core/framework/node_def.proto\x12\ntensorflow\x1a*tensorflow/core/framework/attr_value.proto\"\xb3\x01\n\x07NodeDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\n\n\x02op\x18\x02 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12\x0e\n\x06\x64\x65vice\x18\x04 \x01(\t\x12+\n\x04\x61ttr\x18\x05 \x03(\x0b\x32\x1d.tensorflow.NodeDef.AttrEntry\x1a\x42\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.tensorflow.AttrValue:\x02\x38\x01\x42*\n\x18org.tensorflow.frameworkB\tNodeProtoP\x01\xf8\x01\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_framework_dot_attr__value__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 | _NODEDEF_ATTRENTRY = _descriptor.Descriptor(
32 | name='AttrEntry',
33 | full_name='tensorflow.NodeDef.AttrEntry',
34 | filename=None,
35 | file=DESCRIPTOR,
36 | containing_type=None,
37 | fields=[
38 | _descriptor.FieldDescriptor(
39 | name='key', full_name='tensorflow.NodeDef.AttrEntry.key', index=0,
40 | number=1, type=9, cpp_type=9, label=1,
41 | has_default_value=False, default_value=_b("").decode('utf-8'),
42 | message_type=None, enum_type=None, containing_type=None,
43 | is_extension=False, extension_scope=None,
44 | options=None),
45 | _descriptor.FieldDescriptor(
46 | name='value', full_name='tensorflow.NodeDef.AttrEntry.value', index=1,
47 | number=2, type=11, cpp_type=10, label=1,
48 | has_default_value=False, default_value=None,
49 | message_type=None, enum_type=None, containing_type=None,
50 | is_extension=False, extension_scope=None,
51 | options=None),
52 | ],
53 | extensions=[
54 | ],
55 | nested_types=[],
56 | enum_types=[
57 | ],
58 | options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
59 | is_extendable=False,
60 | syntax='proto3',
61 | extension_ranges=[],
62 | oneofs=[
63 | ],
64 | serialized_start=214,
65 | serialized_end=280,
66 | )
67 |
68 | _NODEDEF = _descriptor.Descriptor(
69 | name='NodeDef',
70 | full_name='tensorflow.NodeDef',
71 | filename=None,
72 | file=DESCRIPTOR,
73 | containing_type=None,
74 | fields=[
75 | _descriptor.FieldDescriptor(
76 | name='name', full_name='tensorflow.NodeDef.name', index=0,
77 | number=1, type=9, cpp_type=9, label=1,
78 | has_default_value=False, default_value=_b("").decode('utf-8'),
79 | message_type=None, enum_type=None, containing_type=None,
80 | is_extension=False, extension_scope=None,
81 | options=None),
82 | _descriptor.FieldDescriptor(
83 | name='op', full_name='tensorflow.NodeDef.op', index=1,
84 | number=2, type=9, cpp_type=9, label=1,
85 | has_default_value=False, default_value=_b("").decode('utf-8'),
86 | message_type=None, enum_type=None, containing_type=None,
87 | is_extension=False, extension_scope=None,
88 | options=None),
89 | _descriptor.FieldDescriptor(
90 | name='input', full_name='tensorflow.NodeDef.input', index=2,
91 | number=3, type=9, cpp_type=9, label=3,
92 | has_default_value=False, default_value=[],
93 | message_type=None, enum_type=None, containing_type=None,
94 | is_extension=False, extension_scope=None,
95 | options=None),
96 | _descriptor.FieldDescriptor(
97 | name='device', full_name='tensorflow.NodeDef.device', index=3,
98 | number=4, type=9, cpp_type=9, label=1,
99 | has_default_value=False, default_value=_b("").decode('utf-8'),
100 | message_type=None, enum_type=None, containing_type=None,
101 | is_extension=False, extension_scope=None,
102 | options=None),
103 | _descriptor.FieldDescriptor(
104 | name='attr', full_name='tensorflow.NodeDef.attr', index=4,
105 | number=5, type=11, cpp_type=10, label=3,
106 | has_default_value=False, default_value=[],
107 | message_type=None, enum_type=None, containing_type=None,
108 | is_extension=False, extension_scope=None,
109 | options=None),
110 | ],
111 | extensions=[
112 | ],
113 | nested_types=[_NODEDEF_ATTRENTRY, ],
114 | enum_types=[
115 | ],
116 | options=None,
117 | is_extendable=False,
118 | syntax='proto3',
119 | extension_ranges=[],
120 | oneofs=[
121 | ],
122 | serialized_start=101,
123 | serialized_end=280,
124 | )
125 |
126 | _NODEDEF_ATTRENTRY.fields_by_name['value'].message_type = tensorflow_dot_core_dot_framework_dot_attr__value__pb2._ATTRVALUE
127 | _NODEDEF_ATTRENTRY.containing_type = _NODEDEF
128 | _NODEDEF.fields_by_name['attr'].message_type = _NODEDEF_ATTRENTRY
129 | DESCRIPTOR.message_types_by_name['NodeDef'] = _NODEDEF
130 |
131 | NodeDef = _reflection.GeneratedProtocolMessageType('NodeDef', (_message.Message,), dict(
132 |
133 | AttrEntry = _reflection.GeneratedProtocolMessageType('AttrEntry', (_message.Message,), dict(
134 | DESCRIPTOR = _NODEDEF_ATTRENTRY,
135 | __module__ = 'tensorflow.core.framework.node_def_pb2'
136 | # @@protoc_insertion_point(class_scope:tensorflow.NodeDef.AttrEntry)
137 | ))
138 | ,
139 | DESCRIPTOR = _NODEDEF,
140 | __module__ = 'tensorflow.core.framework.node_def_pb2'
141 | # @@protoc_insertion_point(class_scope:tensorflow.NodeDef)
142 | ))
143 | _sym_db.RegisterMessage(NodeDef)
144 | _sym_db.RegisterMessage(NodeDef.AttrEntry)
145 |
146 |
147 | DESCRIPTOR.has_options = True
148 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\tNodeProtoP\001\370\001\001'))
149 | _NODEDEF_ATTRENTRY.has_options = True
150 | _NODEDEF_ATTRENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
151 | # @@protoc_insertion_point(module_scope)
152 |
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/kernel_def_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/kernel_def.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 | from ..framework import attr_value_pb2 as tensorflow_dot_core_dot_framework_dot_attr__value__pb2
17 |
18 |
19 | DESCRIPTOR = _descriptor.FileDescriptor(
20 | name='tensorflow/core/framework/kernel_def.proto',
21 | package='tensorflow',
22 | syntax='proto3',
23 | serialized_pb=_b('\n*tensorflow/core/framework/kernel_def.proto\x12\ntensorflow\x1a*tensorflow/core/framework/attr_value.proto\"\xdd\x01\n\tKernelDef\x12\n\n\x02op\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12\x38\n\nconstraint\x18\x03 \x03(\x0b\x32$.tensorflow.KernelDef.AttrConstraint\x12\x17\n\x0fhost_memory_arg\x18\x04 \x03(\t\x12\r\n\x05label\x18\x05 \x01(\t\x1aM\n\x0e\x41ttrConstraint\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x0e\x61llowed_values\x18\x02 \x01(\x0b\x32\x15.tensorflow.AttrValueB0\n\x18org.tensorflow.frameworkB\x0fKernelDefProtosP\x01\xf8\x01\x01\x62\x06proto3')
24 | ,
25 | dependencies=[tensorflow_dot_core_dot_framework_dot_attr__value__pb2.DESCRIPTOR,])
26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
27 |
28 |
29 |
30 |
31 | _KERNELDEF_ATTRCONSTRAINT = _descriptor.Descriptor(
32 | name='AttrConstraint',
33 | full_name='tensorflow.KernelDef.AttrConstraint',
34 | filename=None,
35 | file=DESCRIPTOR,
36 | containing_type=None,
37 | fields=[
38 | _descriptor.FieldDescriptor(
39 | name='name', full_name='tensorflow.KernelDef.AttrConstraint.name', index=0,
40 | number=1, type=9, cpp_type=9, label=1,
41 | has_default_value=False, default_value=_b("").decode('utf-8'),
42 | message_type=None, enum_type=None, containing_type=None,
43 | is_extension=False, extension_scope=None,
44 | options=None),
45 | _descriptor.FieldDescriptor(
46 | name='allowed_values', full_name='tensorflow.KernelDef.AttrConstraint.allowed_values', index=1,
47 | number=2, type=11, cpp_type=10, label=1,
48 | has_default_value=False, default_value=None,
49 | message_type=None, enum_type=None, containing_type=None,
50 | is_extension=False, extension_scope=None,
51 | options=None),
52 | ],
53 | extensions=[
54 | ],
55 | nested_types=[],
56 | enum_types=[
57 | ],
58 | options=None,
59 | is_extendable=False,
60 | syntax='proto3',
61 | extension_ranges=[],
62 | oneofs=[
63 | ],
64 | serialized_start=247,
65 | serialized_end=324,
66 | )
67 |
68 | _KERNELDEF = _descriptor.Descriptor(
69 | name='KernelDef',
70 | full_name='tensorflow.KernelDef',
71 | filename=None,
72 | file=DESCRIPTOR,
73 | containing_type=None,
74 | fields=[
75 | _descriptor.FieldDescriptor(
76 | name='op', full_name='tensorflow.KernelDef.op', index=0,
77 | number=1, type=9, cpp_type=9, label=1,
78 | has_default_value=False, default_value=_b("").decode('utf-8'),
79 | message_type=None, enum_type=None, containing_type=None,
80 | is_extension=False, extension_scope=None,
81 | options=None),
82 | _descriptor.FieldDescriptor(
83 | name='device_type', full_name='tensorflow.KernelDef.device_type', index=1,
84 | number=2, type=9, cpp_type=9, label=1,
85 | has_default_value=False, default_value=_b("").decode('utf-8'),
86 | message_type=None, enum_type=None, containing_type=None,
87 | is_extension=False, extension_scope=None,
88 | options=None),
89 | _descriptor.FieldDescriptor(
90 | name='constraint', full_name='tensorflow.KernelDef.constraint', index=2,
91 | number=3, type=11, cpp_type=10, label=3,
92 | has_default_value=False, default_value=[],
93 | message_type=None, enum_type=None, containing_type=None,
94 | is_extension=False, extension_scope=None,
95 | options=None),
96 | _descriptor.FieldDescriptor(
97 | name='host_memory_arg', full_name='tensorflow.KernelDef.host_memory_arg', index=3,
98 | number=4, type=9, cpp_type=9, label=3,
99 | has_default_value=False, default_value=[],
100 | message_type=None, enum_type=None, containing_type=None,
101 | is_extension=False, extension_scope=None,
102 | options=None),
103 | _descriptor.FieldDescriptor(
104 | name='label', full_name='tensorflow.KernelDef.label', index=4,
105 | number=5, type=9, cpp_type=9, label=1,
106 | has_default_value=False, default_value=_b("").decode('utf-8'),
107 | message_type=None, enum_type=None, containing_type=None,
108 | is_extension=False, extension_scope=None,
109 | options=None),
110 | ],
111 | extensions=[
112 | ],
113 | nested_types=[_KERNELDEF_ATTRCONSTRAINT, ],
114 | enum_types=[
115 | ],
116 | options=None,
117 | is_extendable=False,
118 | syntax='proto3',
119 | extension_ranges=[],
120 | oneofs=[
121 | ],
122 | serialized_start=103,
123 | serialized_end=324,
124 | )
125 |
126 | _KERNELDEF_ATTRCONSTRAINT.fields_by_name['allowed_values'].message_type = tensorflow_dot_core_dot_framework_dot_attr__value__pb2._ATTRVALUE
127 | _KERNELDEF_ATTRCONSTRAINT.containing_type = _KERNELDEF
128 | _KERNELDEF.fields_by_name['constraint'].message_type = _KERNELDEF_ATTRCONSTRAINT
129 | DESCRIPTOR.message_types_by_name['KernelDef'] = _KERNELDEF
130 |
131 | KernelDef = _reflection.GeneratedProtocolMessageType('KernelDef', (_message.Message,), dict(
132 |
133 | AttrConstraint = _reflection.GeneratedProtocolMessageType('AttrConstraint', (_message.Message,), dict(
134 | DESCRIPTOR = _KERNELDEF_ATTRCONSTRAINT,
135 | __module__ = 'tensorflow.core.framework.kernel_def_pb2'
136 | # @@protoc_insertion_point(class_scope:tensorflow.KernelDef.AttrConstraint)
137 | ))
138 | ,
139 | DESCRIPTOR = _KERNELDEF,
140 | __module__ = 'tensorflow.core.framework.kernel_def_pb2'
141 | # @@protoc_insertion_point(class_scope:tensorflow.KernelDef)
142 | ))
143 | _sym_db.RegisterMessage(KernelDef)
144 | _sym_db.RegisterMessage(KernelDef.AttrConstraint)
145 |
146 |
147 | DESCRIPTOR.has_options = True
148 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\017KernelDefProtosP\001\370\001\001'))
149 | # @@protoc_insertion_point(module_scope)
150 |
--------------------------------------------------------------------------------
/pythonlib/tests/unit_tests/test_consumption_client.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License
3 | import pytest
4 | import os
5 | import tempfile
6 | import grpc
7 | import tensorflow as tf
8 | import numpy as np
9 | from unittest import mock
10 | from datetime import datetime, timedelta
11 |
12 | try:
13 | from tensorflow.core.framework import tensor_shape_pb2
14 | from tensorflow.core.framework import types_pb2
15 | except ImportError:
16 | from .tensorflow.core.framework import tensor_shape_pb2
17 | from .tensorflow.core.framework import types_pb2
18 |
19 | from amlrealtimeai.client import PredictionClient
20 |
21 | def test_create_client():
22 | client = PredictionClient("localhost", 50051)
23 | assert client is not None
24 |
25 | def test_create_client_with_auth():
26 | client = PredictionClient("localhost", 50051, True, "key1")
27 | assert client is not None
28 |
29 | def test_create_client_raises_if_host_is_none():
30 | with pytest.raises(ValueError):
31 | PredictionClient(None, 50051)
32 |
33 | def test_create_client_raises_if_port_is_none():
34 | with pytest.raises(ValueError):
35 | PredictionClient("localhost", None)
36 |
37 |
38 | def test_score_image():
39 |
40 | def predict_mock(request, timeout):
41 | inputs = request.inputs['images'].string_val
42 | assert inputs[0].decode('utf-8') == "abc"
43 | return_data = np.asarray([[ 1, 2, 3 ]])
44 | return_tensor = tf.contrib.util.make_tensor_proto(return_data, types_pb2.DT_FLOAT, return_data.shape)
45 | result = mock.MagicMock()
46 | result.outputs = { "output_alias": return_tensor }
47 | return result
48 |
49 | stub_mock = mock.Mock()
50 | stub_mock.Predict = mock.MagicMock(side_effect=predict_mock)
51 |
52 | image_file_path = os.path.join(tempfile.mkdtemp(), "img.png")
53 | image_file = open(image_file_path, "w")
54 | image_file.write("abc")
55 | image_file.close()
56 |
57 | client = PredictionClient("localhost", 50051)
58 | client._get_grpc_stub = lambda: stub_mock
59 |
60 | result = client.score_image(image_file_path)
61 | assert all([x == y for x, y in zip(result, [1, 2, 3])])
62 |
63 |
64 | def test_score_numpy_array():
65 |
66 | def predict_mock(request, timeout):
67 | inputs = tf.contrib.util.make_ndarray(request.inputs['images'])
68 | assert all([x == y for x, y in zip(inputs[0], [ 1, 2, 3 ])])
69 | assert all([x == y for x, y in zip(inputs[1], [ 4, 5, 6 ])])
70 |
71 | return_data = np.asarray([[ 11, 22, 33 ], [ 44, 55, 66 ]])
72 | return_tensor = tf.contrib.util.make_tensor_proto(return_data, types_pb2.DT_FLOAT, return_data.shape)
73 | result = mock.MagicMock()
74 | result.outputs = { "output_alias": return_tensor }
75 | return result
76 |
77 | stub_mock = mock.Mock()
78 | stub_mock.Predict = mock.MagicMock(side_effect=predict_mock)
79 |
80 | client = PredictionClient("localhost", 50051)
81 | client._get_grpc_stub = lambda: stub_mock
82 |
83 | result = client.score_numpy_array(np.asarray([[1, 2, 3], [4, 5, 6]], dtype='f'))
84 | assert all([x == y for x, y in zip(result[0], [ 11, 22, 33 ])])
85 | assert all([x == y for x, y in zip(result[1], [ 44, 55, 66 ])])
86 |
87 |
88 | def test_retrying_rpc_exception():
89 |
90 | first_call = [ True ]
91 |
92 | channel_mock_loaded = { 'value': 0 }
93 | channel_mock_closed = { 'value': 0 }
94 |
95 | def unary_unary(id, request_serializer, response_deserializer):
96 | result = mock.MagicMock()
97 | if id == '/tensorflow.serving.PredictionService/Predict':
98 | if(first_call[0]):
99 | first_call[0] = False
100 | return lambda req, timeout: (_ for _ in ()).throw(grpc.RpcError())
101 |
102 | return_data = np.asarray([[ 11, 22 ]])
103 | return_tensor = tf.contrib.util.make_tensor_proto(return_data, types_pb2.DT_FLOAT, return_data.shape)
104 | result.outputs = { "output_alias": return_tensor }
105 | return lambda req, timeout: result
106 |
107 | def load_channel_mock():
108 | channel_mock_loaded['value'] += 1
109 | return channel_mock
110 |
111 | def close_channel_mock():
112 | channel_mock_closed['value'] += 1
113 |
114 | now = datetime.now()
115 |
116 | channel_mock = mock.Mock()
117 | channel_mock.unary_unary = mock.MagicMock(side_effect=unary_unary)
118 | channel_mock.close = close_channel_mock
119 |
120 | client = PredictionClient("localhost", 50051, channel_shutdown_timeout=timedelta(minutes=1))
121 | client._channel_func = load_channel_mock
122 | client._get_datetime_now = lambda: now
123 |
124 | result = client.score_numpy_array(np.asarray([[1, 2]], dtype='f'))
125 | assert all([x == y for x, y in zip(result[0], [ 11, 22 ])])
126 |
127 | assert channel_mock_loaded['value'] == 2
128 | assert channel_mock_closed['value'] == 1
129 |
130 | def test_create_new_channel_after_timeout_expires():
131 |
132 | channel_mock_loaded = { 'value': 0 }
133 |
134 | def unary_unary(id, request_serializer, response_deserializer):
135 | result = mock.MagicMock()
136 | if id == '/tensorflow.serving.PredictionService/Predict':
137 | return_data = np.asarray([[ 1, 2, 3 ]])
138 | return_tensor = tf.contrib.util.make_tensor_proto(return_data, types_pb2.DT_FLOAT, return_data.shape)
139 | result.outputs = { "output_alias": return_tensor }
140 | return lambda req, timeout: result
141 |
142 | def load_channel_mock():
143 | channel_mock_loaded['value'] += 1
144 | return channel_mock
145 |
146 | now = datetime.now()
147 |
148 | channel_mock = mock.Mock()
149 | channel_mock.unary_unary = mock.MagicMock(side_effect=unary_unary)
150 |
151 | image_file_path = os.path.join(tempfile.mkdtemp(), "img.png")
152 | image_file = open(image_file_path, "w")
153 | image_file.write("abc")
154 | image_file.close()
155 |
156 | client = PredictionClient("localhost", 50051, channel_shutdown_timeout=timedelta(minutes=1))
157 | client._channel_func = load_channel_mock
158 | client._get_datetime_now = lambda: now
159 |
160 | result = client.score_image(image_file_path)
161 | assert all([x == y for x, y in zip(result, [1, 2, 3])])
162 | assert channel_mock_loaded['value'] == 1
163 |
164 | now = now + timedelta(seconds=50)
165 | result = client.score_image(image_file_path)
166 | assert all([x == y for x, y in zip(result, [1, 2, 3])])
167 | assert channel_mock_loaded['value'] == 1
168 |
169 | now = now + timedelta(seconds=20)
170 | result = client.score_image(image_file_path)
171 | assert all([x == y for x, y in zip(result, [1, 2, 3])])
172 | assert channel_mock_loaded['value'] == 1
173 |
174 | now = now + timedelta(seconds=70)
175 | result = client.score_image(image_file_path)
176 | assert all([x == y for x, y in zip(result, [1, 2, 3])])
177 | assert channel_mock_loaded['value'] == 2
--------------------------------------------------------------------------------
/sample-clients/csharp/protos/tensorflow/Types.cs:
--------------------------------------------------------------------------------
1 | // Generated by the protocol buffer compiler. DO NOT EDIT!
2 | // source: tensorflow/core/framework/types.proto
3 | #pragma warning disable 1591, 0612, 3021
4 | #region Designer generated code
5 |
6 | using pb = global::Google.Protobuf;
7 | using pbc = global::Google.Protobuf.Collections;
8 | using pbr = global::Google.Protobuf.Reflection;
9 | using scg = global::System.Collections.Generic;
10 | namespace Tensorflow {
11 |
12 | /// Holder for reflection information generated from tensorflow/core/framework/types.proto
13 | public static partial class TypesReflection {
14 |
15 | #region Descriptor
16 | /// File descriptor for tensorflow/core/framework/types.proto
17 | public static pbr::FileDescriptor Descriptor {
18 | get { return descriptor; }
19 | }
20 | private static pbr::FileDescriptor descriptor;
21 |
22 | static TypesReflection() {
23 | byte[] descriptorData = global::System.Convert.FromBase64String(
24 | string.Concat(
25 | "CiV0ZW5zb3JmbG93L2NvcmUvZnJhbWV3b3JrL3R5cGVzLnByb3RvEgp0ZW5z",
26 | "b3JmbG93KqoGCghEYXRhVHlwZRIOCgpEVF9JTlZBTElEEAASDAoIRFRfRkxP",
27 | "QVQQARINCglEVF9ET1VCTEUQAhIMCghEVF9JTlQzMhADEgwKCERUX1VJTlQ4",
28 | "EAQSDAoIRFRfSU5UMTYQBRILCgdEVF9JTlQ4EAYSDQoJRFRfU1RSSU5HEAcS",
29 | "EAoMRFRfQ09NUExFWDY0EAgSDAoIRFRfSU5UNjQQCRILCgdEVF9CT09MEAoS",
30 | "DAoIRFRfUUlOVDgQCxINCglEVF9RVUlOVDgQDBINCglEVF9RSU5UMzIQDRIP",
31 | "CgtEVF9CRkxPQVQxNhAOEg0KCURUX1FJTlQxNhAPEg4KCkRUX1FVSU5UMTYQ",
32 | "EBINCglEVF9VSU5UMTYQERIRCg1EVF9DT01QTEVYMTI4EBISCwoHRFRfSEFM",
33 | "RhATEg8KC0RUX1JFU09VUkNFEBQSDgoKRFRfVkFSSUFOVBAVEg0KCURUX1VJ",
34 | "TlQzMhAWEg0KCURUX1VJTlQ2NBAXEhAKDERUX0ZMT0FUX1JFRhBlEhEKDURU",
35 | "X0RPVUJMRV9SRUYQZhIQCgxEVF9JTlQzMl9SRUYQZxIQCgxEVF9VSU5UOF9S",
36 | "RUYQaBIQCgxEVF9JTlQxNl9SRUYQaRIPCgtEVF9JTlQ4X1JFRhBqEhEKDURU",
37 | "X1NUUklOR19SRUYQaxIUChBEVF9DT01QTEVYNjRfUkVGEGwSEAoMRFRfSU5U",
38 | "NjRfUkVGEG0SDwoLRFRfQk9PTF9SRUYQbhIQCgxEVF9RSU5UOF9SRUYQbxIR",
39 | "Cg1EVF9RVUlOVDhfUkVGEHASEQoNRFRfUUlOVDMyX1JFRhBxEhMKD0RUX0JG",
40 | "TE9BVDE2X1JFRhByEhEKDURUX1FJTlQxNl9SRUYQcxISCg5EVF9RVUlOVDE2",
41 | "X1JFRhB0EhEKDURUX1VJTlQxNl9SRUYQdRIVChFEVF9DT01QTEVYMTI4X1JF",
42 | "RhB2Eg8KC0RUX0hBTEZfUkVGEHcSEwoPRFRfUkVTT1VSQ0VfUkVGEHgSEgoO",
43 | "RFRfVkFSSUFOVF9SRUYQeRIRCg1EVF9VSU5UMzJfUkVGEHoSEQoNRFRfVUlO",
44 | "VDY0X1JFRhB7QiwKGG9yZy50ZW5zb3JmbG93LmZyYW1ld29ya0ILVHlwZXNQ",
45 | "cm90b3NQAfgBAWIGcHJvdG8z"));
46 | descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
47 | new pbr::FileDescriptor[] { },
48 | new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Tensorflow.DataType), }, null));
49 | }
50 | #endregion
51 |
52 | }
53 | #region Enums
54 | ///
55 | /// LINT.IfChange
56 | ///
57 | public enum DataType {
58 | ///
59 | /// Not a legal value for DataType. Used to indicate a DataType field
60 | /// has not been set.
61 | ///
62 | [pbr::OriginalName("DT_INVALID")] DtInvalid = 0,
63 | ///
64 | /// Data types that all computation devices are expected to be
65 | /// capable to support.
66 | ///
67 | [pbr::OriginalName("DT_FLOAT")] DtFloat = 1,
68 | [pbr::OriginalName("DT_DOUBLE")] DtDouble = 2,
69 | [pbr::OriginalName("DT_INT32")] DtInt32 = 3,
70 | [pbr::OriginalName("DT_UINT8")] DtUint8 = 4,
71 | [pbr::OriginalName("DT_INT16")] DtInt16 = 5,
72 | [pbr::OriginalName("DT_INT8")] DtInt8 = 6,
73 | [pbr::OriginalName("DT_STRING")] DtString = 7,
74 | ///
75 | /// Single-precision complex
76 | ///
77 | [pbr::OriginalName("DT_COMPLEX64")] DtComplex64 = 8,
78 | [pbr::OriginalName("DT_INT64")] DtInt64 = 9,
79 | [pbr::OriginalName("DT_BOOL")] DtBool = 10,
80 | ///
81 | /// Quantized int8
82 | ///
83 | [pbr::OriginalName("DT_QINT8")] DtQint8 = 11,
84 | ///
85 | /// Quantized uint8
86 | ///
87 | [pbr::OriginalName("DT_QUINT8")] DtQuint8 = 12,
88 | ///
89 | /// Quantized int32
90 | ///
91 | [pbr::OriginalName("DT_QINT32")] DtQint32 = 13,
92 | ///
93 | /// Float32 truncated to 16 bits. Only for cast ops.
94 | ///
95 | [pbr::OriginalName("DT_BFLOAT16")] DtBfloat16 = 14,
96 | ///
97 | /// Quantized int16
98 | ///
99 | [pbr::OriginalName("DT_QINT16")] DtQint16 = 15,
100 | ///
101 | /// Quantized uint16
102 | ///
103 | [pbr::OriginalName("DT_QUINT16")] DtQuint16 = 16,
104 | [pbr::OriginalName("DT_UINT16")] DtUint16 = 17,
105 | ///
106 | /// Double-precision complex
107 | ///
108 | [pbr::OriginalName("DT_COMPLEX128")] DtComplex128 = 18,
109 | [pbr::OriginalName("DT_HALF")] DtHalf = 19,
110 | [pbr::OriginalName("DT_RESOURCE")] DtResource = 20,
111 | ///
112 | /// Arbitrary C++ data types
113 | ///
114 | [pbr::OriginalName("DT_VARIANT")] DtVariant = 21,
115 | [pbr::OriginalName("DT_UINT32")] DtUint32 = 22,
116 | [pbr::OriginalName("DT_UINT64")] DtUint64 = 23,
117 | ///
118 | /// Do not use! These are only for parameters. Every enum above
119 | /// should have a corresponding value below (verified by types_test).
120 | ///
121 | [pbr::OriginalName("DT_FLOAT_REF")] DtFloatRef = 101,
122 | [pbr::OriginalName("DT_DOUBLE_REF")] DtDoubleRef = 102,
123 | [pbr::OriginalName("DT_INT32_REF")] DtInt32Ref = 103,
124 | [pbr::OriginalName("DT_UINT8_REF")] DtUint8Ref = 104,
125 | [pbr::OriginalName("DT_INT16_REF")] DtInt16Ref = 105,
126 | [pbr::OriginalName("DT_INT8_REF")] DtInt8Ref = 106,
127 | [pbr::OriginalName("DT_STRING_REF")] DtStringRef = 107,
128 | [pbr::OriginalName("DT_COMPLEX64_REF")] DtComplex64Ref = 108,
129 | [pbr::OriginalName("DT_INT64_REF")] DtInt64Ref = 109,
130 | [pbr::OriginalName("DT_BOOL_REF")] DtBoolRef = 110,
131 | [pbr::OriginalName("DT_QINT8_REF")] DtQint8Ref = 111,
132 | [pbr::OriginalName("DT_QUINT8_REF")] DtQuint8Ref = 112,
133 | [pbr::OriginalName("DT_QINT32_REF")] DtQint32Ref = 113,
134 | [pbr::OriginalName("DT_BFLOAT16_REF")] DtBfloat16Ref = 114,
135 | [pbr::OriginalName("DT_QINT16_REF")] DtQint16Ref = 115,
136 | [pbr::OriginalName("DT_QUINT16_REF")] DtQuint16Ref = 116,
137 | [pbr::OriginalName("DT_UINT16_REF")] DtUint16Ref = 117,
138 | [pbr::OriginalName("DT_COMPLEX128_REF")] DtComplex128Ref = 118,
139 | [pbr::OriginalName("DT_HALF_REF")] DtHalfRef = 119,
140 | [pbr::OriginalName("DT_RESOURCE_REF")] DtResourceRef = 120,
141 | [pbr::OriginalName("DT_VARIANT_REF")] DtVariantRef = 121,
142 | [pbr::OriginalName("DT_UINT32_REF")] DtUint32Ref = 122,
143 | [pbr::OriginalName("DT_UINT64_REF")] DtUint64Ref = 123,
144 | }
145 |
146 | #endregion
147 |
148 | }
149 |
150 | #endregion Designer generated code
151 |
--------------------------------------------------------------------------------
/docs/SSL-and-auth.md:
--------------------------------------------------------------------------------
1 | # SSL/TLS and Authentication
2 |
3 | Machine Learning models are assets, created by experienced professionals using valuable data and compute time.
4 | It is important to secure customer data and company assets.
5 | Azure Machine Learning enables you to do this by providing SSL support and key authentication.
6 | This document provides an overview of:
7 | * enabling SSL/TLS and Authentication in the Azure Machine Learning hardware accelerated inference service
8 | * consuming the services with authentication from Python and C# using the demo clients
9 | * adding authentication to other generated gRPC clients
10 | * consuming the services with self-signed certificates for dev/test purposes
11 |
12 | > [!Note]
13 | > The contents of this document are only applicable to Azure Machine Learning Real-Time AI (FPGA) models. For standard Azure Machine Learning services, refer to the document [here](https://docs.microsoft.com/en-us/azure/machine-learning/preview/how-to-setup-ssl-on-mlc).
14 | ## Enabling SSL/TLS and Authentication
15 |
16 | > [!IMPORTANT]
17 | > Authentication is only enabled for services that have enabled SSL by providing a certificate and key.
18 | > If you do not enable SSL, any user on the internet will be able to make calls against the service.
19 | > If you enable SSL, an authentication key will be required to consume the service.
20 |
21 | SSL ensures that a client is connected to the server it expects, and that communication between the client and server is secure.
22 |
23 | You can either deploy a service with SSL enabled, or update an already deployed service to enable it.
24 | Either way you will follow the same basic steps:
25 |
26 | 1. Acquire an SSL certificate
27 | 2. Deploy or update the service with SSL enabled
28 | 3. Update your DNS to point to the service
29 |
30 | ### 1. Acquire an SSL certificate
31 | Acquire an SSL certificate for the web address at which you expect to host the service. The certificate's common name must be a fully qualified domain name (FQDN), not an IP address.
32 |
33 | [//]: # (TODO: coverste - determine if we support wildcard certs, if not remove the below.)
34 | > [!NOTE]
35 | > You can use a wildcard certificate for development and testing; however, you should not use it for production services.
36 |
37 | The certificate and key should be in two PEM-encoded files:
38 | * A file for the certificate, for example, cert.pem. Make sure the file has the full certificate chain.
39 | * A file for the key, for example, key.pem
40 |
41 | Other formats can generally be converted to PEM using tools such as *openssl*.
42 |
43 | > [!Note]
44 | > If using a self-signed certificate, you'll need to do some extra work to consume the service. See [below](#consuming-services-secured-with-self-signed-certificates).
45 | ### 2. Deploy or update the service with SSL enabled
46 | To deploy with SSL enabled, make the same call you would for any other service, but also pass `ssl_enabled=True`, the contents of `cert.pem` as `ssl_certificate`, and the contents of `key.pem` as `ssl_key`.
47 | For example:
48 | ```python
49 | from amlrealtimeai import DeploymentClient
50 |
51 | subscription_id = ""
52 | resource_group = ""
53 | model_management_account = ""
54 | location = "eastus2"
55 |
56 | model_name = "resnet50-model"
57 | service_name = "quickstart-service"
58 |
59 | deployment_client = DeploymentClient(subscription_id, resource_group, model_management_account, location)
60 |
61 | with open('cert.pem','r') as cert_file:
62 | with open('key.pem','r') as key_file:
63 | cert = cert_file.read()
64 | key = key_file.read()
65 |         service = deployment_client.create_service(service_name, model_id, ssl_enabled=True, ssl_certificate=cert, ssl_key=key)  # model_id: ID of a model previously registered with the DeploymentClient
66 | ```
67 | Make note of the response to the call. You'll need the IP address to finish setting up SSL, and the Primary Key and Secondary Key to consume the service.
68 | ### 3. Update DNS
69 | Update the DNS record for your domain name to resolve to the IP address of your service. Remember that the DNS name must match the certificate common name.
70 |
71 | ## Consuming authenticated services using Sample Clients
72 | ### Consuming authenticated services using Python
73 | Example:
74 | ```python
75 | from amlrealtimeai import PredictionClient
76 | client = PredictionClient(service.ipAddress, service.port, use_ssl=True, access_token="authKey")
77 | image_file = R'C:\path_to_file\image.jpg'
78 | results = client.score_image(image_file)
79 | ```
80 | ### Consuming authenticated services using C#
81 | ```csharp
82 | var client = new ScoringClient(host, 50051, useSSL, "authKey");
83 | float[,] result;
84 | using (var content = File.OpenRead(image))
85 | {
86 | IScoringRequest request = new ImageRequest(content);
87 | result = client.Score(request);
88 | }
89 | ```
90 | ## Consuming authenticated services using other gRPC clients
91 | Azure Machine Learning authenticates clients by checking that the request contains a valid authorization header.
92 |
93 | The general approach is to create a ChannelCredentials object (or your language's equivalent) that combines SslCredentials with a CallCredentials that adds the authorization header to the call metadata.
94 |
95 | For example, in C#:
96 | ```csharp
97 | creds = ChannelCredentials.Create(baseCreds, CallCredentials.FromInterceptor(  // baseCreds: the channel's SslCredentials
98 | async (context, metadata) =>
99 | {
100 | metadata.Add(new Metadata.Entry("authorization", "authKey"));
101 | await Task.CompletedTask;
102 | }));
103 |
104 | ```
105 | or in Go:
106 | ```go
107 | conn, err := grpc.Dial(serverAddr,
108 | grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")),
109 | grpc.WithPerRPCCredentials(&authCreds{
110 | Key: "authKey"}))
111 |
112 | type authCreds struct {
113 | Key string
114 | }
115 |
116 | func (c *authCreds) GetRequestMetadata(context.Context, uri ...string) (map[string]string, error) {
117 | return map[string]string{
118 | "authorization": c.Key,
119 | }, nil
120 | }
121 |
122 | func (c *authCreds) RequireTransportSecurity() bool {
123 | return true
124 | }
125 | ```
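The bundled Python `PredictionClient` shown earlier already attaches the key for you when you pass `use_ssl=True` and `access_token`. If you are working from your own generated Python stubs instead, a minimal sketch with the standard `grpc` package looks like the following; the `host`, `port`, and `"authKey"` values are placeholders:
```python
import grpc

host = "my-service.example.com"  # placeholder: your service's DNS name
port = 50051                     # placeholder: your service's port

def _auth_plugin(context, callback):
    # Add the authorization header to the metadata of every call.
    callback((("authorization", "authKey"),), None)

# Combine TLS channel credentials with per-call credentials that attach the header.
credentials = grpc.composite_channel_credentials(
    grpc.ssl_channel_credentials(),
    grpc.metadata_call_credentials(_auth_plugin))

channel = grpc.secure_channel("{}:{}".format(host, port), credentials)
# Use this channel to construct the generated PredictionService stub.
```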
126 | See the [gRPC docs](https://grpc.io/docs/guides/auth.html) for more information on implementing this in your client language. Generally, you're looking for how to attach authentication metadata to each call.
127 |
128 | ## Consuming services secured with self-signed certificates
129 | > [!IMPORTANT]
130 | > You should not use services secured with self-signed certificates in production.
131 |
132 | gRPC provides a couple of ways to tell it which certificates are valid:
133 | 1. Set the `GRPC_DEFAULT_SSL_ROOTS_FILE_PATH` environment variable to point to the `cert.pem` file you used to deploy the service.
134 | 2. When constructing an SslCredentials object, pass the contents of the `cert.pem` file to the constructor.
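For example, with the Python `grpc` package, option 2 only requires passing the certificate contents when building the channel credentials (a minimal sketch; the `host`, `port`, and certificate path are placeholders):
```python
import grpc

host = "my-service.example.com"  # placeholder: the DNS name on the certificate
port = 50051                     # placeholder: your service's port

# Use the self-signed cert.pem as the root certificate for this channel only.
with open("cert.pem", "rb") as cert_file:
    credentials = grpc.ssl_channel_credentials(root_certificates=cert_file.read())

channel = grpc.secure_channel("{}:{}".format(host, port), credentials)
```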
135 |
136 | Note that either of these approaches causes gRPC to use that certificate as the root of trust instead of your system's normal root certificates.
137 |
138 | Otherwise, gRPC will not accept the untrusted certificate, and your client will fail with an `Unavailable` status code and the details "Connect Failed".
--------------------------------------------------------------------------------
/pythonlib/amlrealtimeai/external/tensorflow/core/framework/variable_pb2.py:
--------------------------------------------------------------------------------
1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
2 | # source: tensorflow/core/framework/variable.proto
3 |
4 | import sys
5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
6 | from google.protobuf import descriptor as _descriptor
7 | from google.protobuf import message as _message
8 | from google.protobuf import reflection as _reflection
9 | from google.protobuf import symbol_database as _symbol_database
10 | from google.protobuf import descriptor_pb2
11 | # @@protoc_insertion_point(imports)
12 |
13 | _sym_db = _symbol_database.Default()
14 |
15 |
16 |
17 |
18 | DESCRIPTOR = _descriptor.FileDescriptor(
19 | name='tensorflow/core/framework/variable.proto',
20 | package='tensorflow',
21 | syntax='proto3',
22 | serialized_pb=_b('\n(tensorflow/core/framework/variable.proto\x12\ntensorflow\"\xc1\x01\n\x0bVariableDef\x12\x15\n\rvariable_name\x18\x01 \x01(\t\x12\x1a\n\x12initial_value_name\x18\x06 \x01(\t\x12\x18\n\x10initializer_name\x18\x02 \x01(\t\x12\x15\n\rsnapshot_name\x18\x03 \x01(\t\x12\x39\n\x13save_slice_info_def\x18\x04 \x01(\x0b\x32\x1c.tensorflow.SaveSliceInfoDef\x12\x13\n\x0bis_resource\x18\x05 \x01(\x08\"`\n\x10SaveSliceInfoDef\x12\x11\n\tfull_name\x18\x01 \x01(\t\x12\x12\n\nfull_shape\x18\x02 \x03(\x03\x12\x12\n\nvar_offset\x18\x03 \x03(\x03\x12\x11\n\tvar_shape\x18\x04 \x03(\x03\x42/\n\x18org.tensorflow.frameworkB\x0eVariableProtosP\x01\xf8\x01\x01\x62\x06proto3')
23 | )
24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR)
25 |
26 |
27 |
28 |
29 | _VARIABLEDEF = _descriptor.Descriptor(
30 | name='VariableDef',
31 | full_name='tensorflow.VariableDef',
32 | filename=None,
33 | file=DESCRIPTOR,
34 | containing_type=None,
35 | fields=[
36 | _descriptor.FieldDescriptor(
37 | name='variable_name', full_name='tensorflow.VariableDef.variable_name', index=0,
38 | number=1, type=9, cpp_type=9, label=1,
39 | has_default_value=False, default_value=_b("").decode('utf-8'),
40 | message_type=None, enum_type=None, containing_type=None,
41 | is_extension=False, extension_scope=None,
42 | options=None),
43 | _descriptor.FieldDescriptor(
44 | name='initial_value_name', full_name='tensorflow.VariableDef.initial_value_name', index=1,
45 | number=6, type=9, cpp_type=9, label=1,
46 | has_default_value=False, default_value=_b("").decode('utf-8'),
47 | message_type=None, enum_type=None, containing_type=None,
48 | is_extension=False, extension_scope=None,
49 | options=None),
50 | _descriptor.FieldDescriptor(
51 | name='initializer_name', full_name='tensorflow.VariableDef.initializer_name', index=2,
52 | number=2, type=9, cpp_type=9, label=1,
53 | has_default_value=False, default_value=_b("").decode('utf-8'),
54 | message_type=None, enum_type=None, containing_type=None,
55 | is_extension=False, extension_scope=None,
56 | options=None),
57 | _descriptor.FieldDescriptor(
58 | name='snapshot_name', full_name='tensorflow.VariableDef.snapshot_name', index=3,
59 | number=3, type=9, cpp_type=9, label=1,
60 | has_default_value=False, default_value=_b("").decode('utf-8'),
61 | message_type=None, enum_type=None, containing_type=None,
62 | is_extension=False, extension_scope=None,
63 | options=None),
64 | _descriptor.FieldDescriptor(
65 | name='save_slice_info_def', full_name='tensorflow.VariableDef.save_slice_info_def', index=4,
66 | number=4, type=11, cpp_type=10, label=1,
67 | has_default_value=False, default_value=None,
68 | message_type=None, enum_type=None, containing_type=None,
69 | is_extension=False, extension_scope=None,
70 | options=None),
71 | _descriptor.FieldDescriptor(
72 | name='is_resource', full_name='tensorflow.VariableDef.is_resource', index=5,
73 | number=5, type=8, cpp_type=7, label=1,
74 | has_default_value=False, default_value=False,
75 | message_type=None, enum_type=None, containing_type=None,
76 | is_extension=False, extension_scope=None,
77 | options=None),
78 | ],
79 | extensions=[
80 | ],
81 | nested_types=[],
82 | enum_types=[
83 | ],
84 | options=None,
85 | is_extendable=False,
86 | syntax='proto3',
87 | extension_ranges=[],
88 | oneofs=[
89 | ],
90 | serialized_start=57,
91 | serialized_end=250,
92 | )
93 |
94 |
95 | _SAVESLICEINFODEF = _descriptor.Descriptor(
96 | name='SaveSliceInfoDef',
97 | full_name='tensorflow.SaveSliceInfoDef',
98 | filename=None,
99 | file=DESCRIPTOR,
100 | containing_type=None,
101 | fields=[
102 | _descriptor.FieldDescriptor(
103 | name='full_name', full_name='tensorflow.SaveSliceInfoDef.full_name', index=0,
104 | number=1, type=9, cpp_type=9, label=1,
105 | has_default_value=False, default_value=_b("").decode('utf-8'),
106 | message_type=None, enum_type=None, containing_type=None,
107 | is_extension=False, extension_scope=None,
108 | options=None),
109 | _descriptor.FieldDescriptor(
110 | name='full_shape', full_name='tensorflow.SaveSliceInfoDef.full_shape', index=1,
111 | number=2, type=3, cpp_type=2, label=3,
112 | has_default_value=False, default_value=[],
113 | message_type=None, enum_type=None, containing_type=None,
114 | is_extension=False, extension_scope=None,
115 | options=None),
116 | _descriptor.FieldDescriptor(
117 | name='var_offset', full_name='tensorflow.SaveSliceInfoDef.var_offset', index=2,
118 | number=3, type=3, cpp_type=2, label=3,
119 | has_default_value=False, default_value=[],
120 | message_type=None, enum_type=None, containing_type=None,
121 | is_extension=False, extension_scope=None,
122 | options=None),
123 | _descriptor.FieldDescriptor(
124 | name='var_shape', full_name='tensorflow.SaveSliceInfoDef.var_shape', index=3,
125 | number=4, type=3, cpp_type=2, label=3,
126 | has_default_value=False, default_value=[],
127 | message_type=None, enum_type=None, containing_type=None,
128 | is_extension=False, extension_scope=None,
129 | options=None),
130 | ],
131 | extensions=[
132 | ],
133 | nested_types=[],
134 | enum_types=[
135 | ],
136 | options=None,
137 | is_extendable=False,
138 | syntax='proto3',
139 | extension_ranges=[],
140 | oneofs=[
141 | ],
142 | serialized_start=252,
143 | serialized_end=348,
144 | )
145 |
146 | _VARIABLEDEF.fields_by_name['save_slice_info_def'].message_type = _SAVESLICEINFODEF
147 | DESCRIPTOR.message_types_by_name['VariableDef'] = _VARIABLEDEF
148 | DESCRIPTOR.message_types_by_name['SaveSliceInfoDef'] = _SAVESLICEINFODEF
149 |
150 | VariableDef = _reflection.GeneratedProtocolMessageType('VariableDef', (_message.Message,), dict(
151 | DESCRIPTOR = _VARIABLEDEF,
152 | __module__ = 'tensorflow.core.framework.variable_pb2'
153 | # @@protoc_insertion_point(class_scope:tensorflow.VariableDef)
154 | ))
155 | _sym_db.RegisterMessage(VariableDef)
156 |
157 | SaveSliceInfoDef = _reflection.GeneratedProtocolMessageType('SaveSliceInfoDef', (_message.Message,), dict(
158 | DESCRIPTOR = _SAVESLICEINFODEF,
159 | __module__ = 'tensorflow.core.framework.variable_pb2'
160 | # @@protoc_insertion_point(class_scope:tensorflow.SaveSliceInfoDef)
161 | ))
162 | _sym_db.RegisterMessage(SaveSliceInfoDef)
163 |
164 |
165 | DESCRIPTOR.has_options = True
166 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\016VariableProtosP\001\370\001\001'))
167 | # @@protoc_insertion_point(module_scope)
168 |
--------------------------------------------------------------------------------
/sample-clients/csharp/protos/tensorflow/NamedTensor.cs:
--------------------------------------------------------------------------------
1 | // Generated by the protocol buffer compiler. DO NOT EDIT!
2 | // source: tensorflow/core/protobuf/named_tensor.proto
3 | #pragma warning disable 1591, 0612, 3021
4 | #region Designer generated code
5 |
6 | using pb = global::Google.Protobuf;
7 | using pbc = global::Google.Protobuf.Collections;
8 | using pbr = global::Google.Protobuf.Reflection;
9 | using scg = global::System.Collections.Generic;
10 | namespace Tensorflow {
11 |
12 | /// Holder for reflection information generated from tensorflow/core/protobuf/named_tensor.proto
13 | public static partial class NamedTensorReflection {
14 |
15 | #region Descriptor
16 | /// File descriptor for tensorflow/core/protobuf/named_tensor.proto
17 | public static pbr::FileDescriptor Descriptor {
18 | get { return descriptor; }
19 | }
20 | private static pbr::FileDescriptor descriptor;
21 |
22 | static NamedTensorReflection() {
23 | byte[] descriptorData = global::System.Convert.FromBase64String(
24 | string.Concat(
25 | "Cit0ZW5zb3JmbG93L2NvcmUvcHJvdG9idWYvbmFtZWRfdGVuc29yLnByb3Rv",
26 | "Egp0ZW5zb3JmbG93GiZ0ZW5zb3JmbG93L2NvcmUvZnJhbWV3b3JrL3RlbnNv",
27 | "ci5wcm90byJJChBOYW1lZFRlbnNvclByb3RvEgwKBG5hbWUYASABKAkSJwoG",
28 | "dGVuc29yGAIgASgLMhcudGVuc29yZmxvdy5UZW5zb3JQcm90b0IyChhvcmcu",
29 | "dGVuc29yZmxvdy5mcmFtZXdvcmtCEU5hbWVkVGVuc29yUHJvdG9zUAH4AQFi",
30 | "BnByb3RvMw=="));
31 | descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
32 | new pbr::FileDescriptor[] { global::Tensorflow.TensorReflection.Descriptor, },
33 | new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
34 | new pbr::GeneratedClrTypeInfo(typeof(global::Tensorflow.NamedTensorProto), global::Tensorflow.NamedTensorProto.Parser, new[]{ "Name", "Tensor" }, null, null, null)
35 | }));
36 | }
37 | #endregion
38 |
39 | }
40 | #region Messages
41 | ///
42 | /// A pair of tensor name and tensor values.
43 | ///
44 | public sealed partial class NamedTensorProto : pb::IMessage {
45 | private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NamedTensorProto());
46 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
47 | public static pb::MessageParser Parser { get { return _parser; } }
48 |
49 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
50 | public static pbr::MessageDescriptor Descriptor {
51 | get { return global::Tensorflow.NamedTensorReflection.Descriptor.MessageTypes[0]; }
52 | }
53 |
54 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
55 | pbr::MessageDescriptor pb::IMessage.Descriptor {
56 | get { return Descriptor; }
57 | }
58 |
59 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
60 | public NamedTensorProto() {
61 | OnConstruction();
62 | }
63 |
64 | partial void OnConstruction();
65 |
66 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
67 | public NamedTensorProto(NamedTensorProto other) : this() {
68 | name_ = other.name_;
69 | Tensor = other.tensor_ != null ? other.Tensor.Clone() : null;
70 | }
71 |
72 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
73 | public NamedTensorProto Clone() {
74 | return new NamedTensorProto(this);
75 | }
76 |
77 | /// Field number for the "name" field.
78 | public const int NameFieldNumber = 1;
79 | private string name_ = "";
80 | ///
81 | /// Name of the tensor.
82 | ///
83 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
84 | public string Name {
85 | get { return name_; }
86 | set {
87 | name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
88 | }
89 | }
90 |
91 | /// Field number for the "tensor" field.
92 | public const int TensorFieldNumber = 2;
93 | private global::Tensorflow.TensorProto tensor_;
94 | ///
95 | /// The client can populate a TensorProto using a tensorflow::Tensor`, or
96 | /// directly using the protobuf field accessors.
97 | ///
98 | /// The client specifies whether the returned tensor values should be
99 | /// filled tensor fields (float_val, int_val, etc.) or encoded in a
100 | /// compact form in tensor.tensor_content.
101 | ///
102 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
103 | public global::Tensorflow.TensorProto Tensor {
104 | get { return tensor_; }
105 | set {
106 | tensor_ = value;
107 | }
108 | }
109 |
110 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
111 | public override bool Equals(object other) {
112 | return Equals(other as NamedTensorProto);
113 | }
114 |
115 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
116 | public bool Equals(NamedTensorProto other) {
117 | if (ReferenceEquals(other, null)) {
118 | return false;
119 | }
120 | if (ReferenceEquals(other, this)) {
121 | return true;
122 | }
123 | if (Name != other.Name) return false;
124 | if (!object.Equals(Tensor, other.Tensor)) return false;
125 | return true;
126 | }
127 |
128 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
129 | public override int GetHashCode() {
130 | int hash = 1;
131 | if (Name.Length != 0) hash ^= Name.GetHashCode();
132 | if (tensor_ != null) hash ^= Tensor.GetHashCode();
133 | return hash;
134 | }
135 |
136 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
137 | public override string ToString() {
138 | return pb::JsonFormatter.ToDiagnosticString(this);
139 | }
140 |
141 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
142 | public void WriteTo(pb::CodedOutputStream output) {
143 | if (Name.Length != 0) {
144 | output.WriteRawTag(10);
145 | output.WriteString(Name);
146 | }
147 | if (tensor_ != null) {
148 | output.WriteRawTag(18);
149 | output.WriteMessage(Tensor);
150 | }
151 | }
152 |
153 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
154 | public int CalculateSize() {
155 | int size = 0;
156 | if (Name.Length != 0) {
157 | size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
158 | }
159 | if (tensor_ != null) {
160 | size += 1 + pb::CodedOutputStream.ComputeMessageSize(Tensor);
161 | }
162 | return size;
163 | }
164 |
165 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
166 | public void MergeFrom(NamedTensorProto other) {
167 | if (other == null) {
168 | return;
169 | }
170 | if (other.Name.Length != 0) {
171 | Name = other.Name;
172 | }
173 | if (other.tensor_ != null) {
174 | if (tensor_ == null) {
175 | tensor_ = new global::Tensorflow.TensorProto();
176 | }
177 | Tensor.MergeFrom(other.Tensor);
178 | }
179 | }
180 |
181 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
182 | public void MergeFrom(pb::CodedInputStream input) {
183 | uint tag;
184 | while ((tag = input.ReadTag()) != 0) {
185 | switch(tag) {
186 | default:
187 | input.SkipLastField();
188 | break;
189 | case 10: {
190 | Name = input.ReadString();
191 | break;
192 | }
193 | case 18: {
194 | if (tensor_ == null) {
195 | tensor_ = new global::Tensorflow.TensorProto();
196 | }
197 | input.ReadMessage(tensor_);
198 | break;
199 | }
200 | }
201 | }
202 | }
203 |
204 | }
205 |
206 | #endregion
207 |
208 | }
209 |
210 | #endregion Designer generated code
211 |
--------------------------------------------------------------------------------