├── deploy-r-models ├── R │ ├── Dockerfile │ ├── DeployRModels.Rproj │ ├── 01-plumber-debug.R │ ├── plumber-api.R │ ├── 00-train-model.R │ ├── 02a-mmls-deploy.R │ └── 02b-deploy-aci.sh ├── TestDeployedRModels │ ├── TestDeployedRModels │ │ ├── Config │ │ │ ├── CarSvcContainerInstance.cs │ │ │ └── MachineLearningServer.cs │ │ ├── packages.config │ │ ├── app.config │ │ ├── carsService │ │ │ ├── Models │ │ │ │ ├── OutputParameters.cs │ │ │ │ ├── RenewTokenRequest.cs │ │ │ │ ├── StartBatchExecutionResponse.cs │ │ │ │ ├── InputParameters.cs │ │ │ │ ├── LoginRequest.cs │ │ │ │ ├── Error.cs │ │ │ │ ├── AccessTokenResponse.cs │ │ │ │ ├── WebServiceResult.cs │ │ │ │ ├── BatchWebServiceResult.cs │ │ │ │ └── ErrorException.cs │ │ │ ├── ICarsService.cs │ │ │ └── CarsServiceExtensions.cs │ │ ├── Properties │ │ │ └── AssemblyInfo.cs │ │ ├── Program.cs │ │ └── TestDeployedRModels.csproj │ └── TestDeployedRModels.sln ├── setup │ └── install-mmls-ubuntu.sh ├── deploy-mmls.sh └── SQL │ └── SqlServerMachineLearningServices.ipynb ├── LICENSE ├── azure-ml-svc-prep-train-py ├── 00-Setup.ipynb ├── train │ └── train_Fashion_MNIST.py ├── 03-Fashion-MNIST-AzureMLCompute.ipynb ├── 01-Fashion-MNIST-Local.ipynb └── 02-Fashion-MNIST-LogToAzure.ipynb └── .gitignore /deploy-r-models/R/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM trestletech/plumber 2 | 3 | COPY cars-model.rds /cars-model.rds 4 | COPY plumber-api.R /plumber.R 5 | 6 | CMD ["/plumber.R"] 7 | -------------------------------------------------------------------------------- /deploy-r-models/R/DeployRModels.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 
-------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/Config/CarSvcContainerInstance.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace TestDeployedRModels.Config 4 | { 5 | public class CarSvcContainerInstance 6 | { 7 | public static readonly Uri ManualTransmissionEndpoint = new Uri("http://..azurecontainer.io:8000/manualtransmission"); 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/Config/MachineLearningServer.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace TestDeployedRModels.Config 4 | { 5 | public class MachineLearningServer 6 | { 7 | public static readonly Uri Url = new Uri("http://..cloudapp.azure.com:12800"); 8 | public const string User = "admin"; 9 | public const string Password = ""; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/packages.config: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/app.config: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /deploy-r-models/R/01-plumber-debug.R: -------------------------------------------------------------------------------- 1 | if (!require("plumber")) install.packages("plumber") 2 | if (!require("jsonlite")) install.packages("jsonlite") 3 | library(plumber) 4 | library(jsonlite) 5 | 6 | pr <- 
plumb("plumber-api.R") 7 | 8 | swaggerFile <- pr$swaggerFile() 9 | swaggerFile$info$title <- "plumberCarsService" 10 | swaggerFile$info$description <- "Returns the probability of a vehicle being fitted with a manual transmission" 11 | swaggerFile$info$version <- "1.0.0" 12 | swagger <- toJSON(swaggerFile, pretty = TRUE, auto_unbox = TRUE) 13 | cat(swagger, file = "plumber-swagger.json", append = FALSE) 14 | 15 | pr$run(port=8000) 16 | # http://127.0.0.1:8000/swagger.json 17 | -------------------------------------------------------------------------------- /deploy-r-models/R/plumber-api.R: -------------------------------------------------------------------------------- 1 | # plumber.R 2 | 3 | model <- readRDS("cars-model.rds") 4 | 5 | #* Plot a histogram of the gross horsepower 6 | #* @png 7 | #* @get /plothp 8 | function(){ 9 | hist(mtcars$hp) 10 | } 11 | 12 | #* Plot a histogram of the manual transmission 13 | #* @png 14 | #* @get /plotam 15 | function(){ 16 | hist(mtcars$am) 17 | } 18 | 19 | #* Plot a histogram of the weight (1000 lbs) 20 | #* @png 21 | #* @get /plotwt 22 | function(){ 23 | hist(mtcars$wt) 24 | } 25 | 26 | #* Returns the probability whether the car has a manual transmission 27 | #* @param hp Gross horsepower 28 | #* @param wt Weight (1000 lbs) 29 | #* @post /manualtransmission 30 | function(hp, wt){ 31 | newdata <- data.frame(hp = as.numeric(hp), wt = as.numeric(wt)) 32 | predict(model, newdata, type = "response") 33 | } 34 | -------------------------------------------------------------------------------- /deploy-r-models/R/00-train-model.R: -------------------------------------------------------------------------------- 1 | ########################################################## 2 | # Create, Test and Save a Logistic Regression Model # 3 | ########################################################## 4 | 5 | # Use logistic regression equation of vehicle transmission 6 | # in the data set mtcars to estimate the probability of 7 | # a vehicle 
being fitted with a manual transmission 8 | # based on horsepower (hp) and weight (wt) 9 | 10 | ?mtcars 11 | summary(mtcars) 12 | 13 | # Create glm model with `mtcars` dataset 14 | carsModel <- glm(formula = am ~ hp + wt, data = mtcars, family = binomial) 15 | 16 | # Produce a prediction function that can use the model 17 | manualTransmission <- function(hp, wt) { 18 | newdata <- data.frame(hp = hp, wt = wt) 19 | predict(carsModel, newdata, type = "response") 20 | } 21 | 22 | # test function locally by printing results 23 | print(manualTransmission(120, 2.8)) # 0.6418125 24 | 25 | carsModel <- glm(formula = am ~ hp + wt, data = mtcars, family = binomial) 26 | saveRDS(carsModel, "cars-model.rds") 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Sascha Dittmann 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/OutputParameters.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class OutputParameters 15 | { 16 | /// 17 | /// Initializes a new instance of the OutputParameters class. 18 | /// 19 | public OutputParameters() { } 20 | 21 | /// 22 | /// Initializes a new instance of the OutputParameters class. 23 | /// 24 | public OutputParameters(double? answer = default(double?)) 25 | { 26 | Answer = answer; 27 | } 28 | 29 | /// 30 | /// numeric 31 | /// 32 | [JsonProperty(PropertyName = "answer")] 33 | public double? 
Answer { get; set; } 34 | 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | VisualStudioVersion = 15.0.28307.902 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TestDeployedRModels", "TestDeployedRModels\TestDeployedRModels.csproj", "{8439A0AA-BD90-4701-9979-90040AC9EFC8}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {8439A0AA-BD90-4701-9979-90040AC9EFC8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {8439A0AA-BD90-4701-9979-90040AC9EFC8}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {8439A0AA-BD90-4701-9979-90040AC9EFC8}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {8439A0AA-BD90-4701-9979-90040AC9EFC8}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {1AD74910-A120-4EF6-A6DD-51AD872D3581} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/RenewTokenRequest.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 
4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class RenewTokenRequest 15 | { 16 | /// 17 | /// Initializes a new instance of the RenewTokenRequest class. 18 | /// 19 | public RenewTokenRequest() { } 20 | 21 | /// 22 | /// Initializes a new instance of the RenewTokenRequest class. 23 | /// 24 | public RenewTokenRequest(string refreshToken = default(string)) 25 | { 26 | RefreshToken = refreshToken; 27 | } 28 | 29 | /// 30 | /// A currently valid refresh token. 31 | /// 32 | [JsonProperty(PropertyName = "refreshToken")] 33 | public string RefreshToken { get; set; } 34 | 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/StartBatchExecutionResponse.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class StartBatchExecutionResponse 15 | { 16 | /// 17 | /// Initializes a new instance of the StartBatchExecutionResponse 18 | /// class. 19 | /// 20 | public StartBatchExecutionResponse() { } 21 | 22 | /// 23 | /// Initializes a new instance of the StartBatchExecutionResponse 24 | /// class. 25 | /// 26 | public StartBatchExecutionResponse(string batchExecutionId = default(string)) 27 | { 28 | BatchExecutionId = batchExecutionId; 29 | } 30 | 31 | /// 32 | /// Id of the asynchronous execution. 
33 | /// 34 | [JsonProperty(PropertyName = "batchExecutionId")] 35 | public string BatchExecutionId { get; set; } 36 | 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/InputParameters.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class InputParameters 15 | { 16 | /// 17 | /// Initializes a new instance of the InputParameters class. 18 | /// 19 | public InputParameters() { } 20 | 21 | /// 22 | /// Initializes a new instance of the InputParameters class. 23 | /// 24 | public InputParameters(double? hp = default(double?), double? wt = default(double?)) 25 | { 26 | Hp = hp; 27 | Wt = wt; 28 | } 29 | 30 | /// 31 | /// numeric 32 | /// 33 | [JsonProperty(PropertyName = "hp")] 34 | public double? Hp { get; set; } 35 | 36 | /// 37 | /// numeric 38 | /// 39 | [JsonProperty(PropertyName = "wt")] 40 | public double? Wt { get; set; } 41 | 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/LoginRequest.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 
4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class LoginRequest 15 | { 16 | /// 17 | /// Initializes a new instance of the LoginRequest class. 18 | /// 19 | public LoginRequest() { } 20 | 21 | /// 22 | /// Initializes a new instance of the LoginRequest class. 23 | /// 24 | public LoginRequest(string username = default(string), string password = default(string)) 25 | { 26 | Username = username; 27 | Password = password; 28 | } 29 | 30 | /// 31 | /// The name of the user. 32 | /// 33 | [JsonProperty(PropertyName = "username")] 34 | public string Username { get; set; } 35 | 36 | /// 37 | /// The password of the user. 38 | /// 39 | [JsonProperty(PropertyName = "password")] 40 | public string Password { get; set; } 41 | 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/Error.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class Error 15 | { 16 | /// 17 | /// Initializes a new instance of the Error class. 18 | /// 19 | public Error() { } 20 | 21 | /// 22 | /// Initializes a new instance of the Error class. 23 | /// 24 | public Error(int? 
code = default(int?), string message = default(string), string fields = default(string)) 25 | { 26 | Code = code; 27 | Message = message; 28 | Fields = fields; 29 | } 30 | 31 | /// 32 | /// 33 | [JsonProperty(PropertyName = "code")] 34 | public int? Code { get; set; } 35 | 36 | /// 37 | /// 38 | [JsonProperty(PropertyName = "message")] 39 | public string Message { get; set; } 40 | 41 | /// 42 | /// 43 | [JsonProperty(PropertyName = "fields")] 44 | public string Fields { get; set; } 45 | 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/Properties/AssemblyInfo.cs: -------------------------------------------------------------------------------- 1 | using System.Reflection; 2 | using System.Runtime.CompilerServices; 3 | using System.Runtime.InteropServices; 4 | 5 | // General Information about an assembly is controlled through the following 6 | // set of attributes. Change these attribute values to modify the information 7 | // associated with an assembly. 8 | [assembly: AssemblyTitle("TestDeployedRModels")] 9 | [assembly: AssemblyDescription("")] 10 | [assembly: AssemblyConfiguration("")] 11 | [assembly: AssemblyCompany("")] 12 | [assembly: AssemblyProduct("TestDeployedRModels")] 13 | [assembly: AssemblyCopyright("Copyright © 2019")] 14 | [assembly: AssemblyTrademark("")] 15 | [assembly: AssemblyCulture("")] 16 | 17 | // Setting ComVisible to false makes the types in this assembly not visible 18 | // to COM components. If you need to access a type in this assembly from 19 | // COM, set the ComVisible attribute to true on that type. 
20 | [assembly: ComVisible(false)] 21 | 22 | // The following GUID is for the ID of the typelib if this project is exposed to COM 23 | [assembly: Guid("8439a0aa-bd90-4701-9979-90040ac9efc8")] 24 | 25 | // Version information for an assembly consists of the following four values: 26 | // 27 | // Major Version 28 | // Minor Version 29 | // Build Number 30 | // Revision 31 | // 32 | // You can specify all the values or you can default the Build and Revision Numbers 33 | // by using the '*' as shown below: 34 | // [assembly: AssemblyVersion("1.0.*")] 35 | [assembly: AssemblyVersion("1.0.0.0")] 36 | [assembly: AssemblyFileVersion("1.0.0.0")] 37 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/AccessTokenResponse.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class AccessTokenResponse 15 | { 16 | /// 17 | /// Initializes a new instance of the AccessTokenResponse class. 18 | /// 19 | public AccessTokenResponse() { } 20 | 21 | /// 22 | /// Initializes a new instance of the AccessTokenResponse class. 
23 | /// 24 | public AccessTokenResponse(string tokenType = default(string), string accessToken = default(string), string expiresOn = default(string), string refreshToken = default(string)) 25 | { 26 | TokenType = tokenType; 27 | AccessToken = accessToken; 28 | ExpiresOn = expiresOn; 29 | RefreshToken = refreshToken; 30 | } 31 | 32 | /// 33 | /// 34 | [JsonProperty(PropertyName = "token_type")] 35 | public string TokenType { get; set; } 36 | 37 | /// 38 | /// 39 | [JsonProperty(PropertyName = "access_token")] 40 | public string AccessToken { get; set; } 41 | 42 | /// 43 | /// 44 | [JsonProperty(PropertyName = "expires_on")] 45 | public string ExpiresOn { get; set; } 46 | 47 | /// 48 | /// 49 | [JsonProperty(PropertyName = "refresh_token")] 50 | public string RefreshToken { get; set; } 51 | 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /azure-ml-svc-prep-train-py/00-Setup.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from azureml.core import Workspace\n", 10 | "import os\n", 11 | "\n", 12 | "subscription_id = \"\"\n", 13 | "resource_group = \"mlops-youtube\"\n", 14 | "workspace_name = \"mlops-youtube-mlwrksp\"\n", 15 | "workspace_region = \"westeurope\"\n", 16 | "\n", 17 | "try:\n", 18 | " ws = Workspace(subscription_id = subscription_id, \n", 19 | " resource_group = resource_group, \n", 20 | " workspace_name = workspace_name)\n", 21 | " ws.write_config()\n", 22 | " print('Library configuration succeeded')\n", 23 | "except:\n", 24 | " ws = Workspace.create(name = workspace_name,\n", 25 | " subscription_id = subscription_id,\n", 26 | " resource_group = resource_group, \n", 27 | " location = workspace_region,\n", 28 | " create_resource_group = True,\n", 29 | " exist_ok = True)\n", 30 | " ws.get_details()\n", 31 | " 
ws.write_config()\n", 32 | " print('Library configuration succeeded')" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [] 41 | } 42 | ], 43 | "metadata": { 44 | "kernelspec": { 45 | "display_name": "Python 3", 46 | "language": "python", 47 | "name": "python3" 48 | }, 49 | "language_info": { 50 | "codemirror_mode": { 51 | "name": "ipython", 52 | "version": 3 53 | }, 54 | "file_extension": ".py", 55 | "mimetype": "text/x-python", 56 | "name": "python", 57 | "nbconvert_exporter": "python", 58 | "pygments_lexer": "ipython3", 59 | "version": "3.5.2" 60 | } 61 | }, 62 | "nbformat": 4, 63 | "nbformat_minor": 2 64 | } 65 | -------------------------------------------------------------------------------- /deploy-r-models/setup/install-mmls-ubuntu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | declare password="" 3 | 4 | # Initialize parameters specified from command line 5 | while getopts ":p:" arg; do 6 | case "${arg}" in 7 | p) 8 | password=${OPTARG} 9 | ;; 10 | esac 11 | done 12 | shift $((OPTIND-1)) 13 | 14 | if [[ -z "$password" ]]; then 15 | while : ; do 16 | echo -n "Enter a password for the Machine Learning Server admin:" 17 | read -s password 18 | echo 19 | echo -n "Please repeat the password for the Machine Learning Server admin:" 20 | read -s password_confirm 21 | echo 22 | if [[ "$password" == "$password_confirm" ]]; then 23 | break 24 | else 25 | echo "The passwords do not match. Please retry." 
26 | fi 27 | done 28 | fi 29 | 30 | # Optionally, if your system does not have the https apt transport option 31 | apt-get install apt-transport-https 32 | 33 | # Add the **azure-cli** repo to your apt sources list 34 | AZ_REPO=$(lsb_release -cs) 35 | 36 | echo "deb [arch=amd64] https://packages.microsoft.com/repos/azure-cli/ $AZ_REPO main" | sudo tee /etc/apt/sources.list.d/azure-cli.list 37 | 38 | # Set the location of the package repo the "prod" directory containing the distribution. 39 | # This example specifies 16.04. Replace with 14.04 if you want that version 40 | wget https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb 41 | 42 | # Register the repo 43 | dpkg -i packages-microsoft-prod.deb 44 | 45 | # Remove deb file 46 | rm -rf packages-microsoft-prod.deb 47 | 48 | # Add the Microsoft public signing key for Secure APT 49 | apt-key adv --keyserver packages.microsoft.com --recv-keys 52E16F86FEE04B979B07E28DB02C46DF417A0893 50 | 51 | # Update packages on your system 52 | apt-get update 53 | 54 | # Install the server 55 | apt-get install -y microsoft-mlserver-all-9.4.7 56 | 57 | # Activate the server 58 | /opt/microsoft/mlserver/9.4.7/bin/R/activate.sh -a -l 59 | 60 | # Set up both o16n nodes on one machine 61 | az mlserver admin node setup --onebox --admin-password $password --confirm-password $password 62 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/WebServiceResult.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 
4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class WebServiceResult 15 | { 16 | /// 17 | /// Initializes a new instance of the WebServiceResult class. 18 | /// 19 | public WebServiceResult() { } 20 | 21 | /// 22 | /// Initializes a new instance of the WebServiceResult class. 23 | /// 24 | public WebServiceResult(bool? success = default(bool?), string errorMessage = default(string), string consoleOutput = default(string), IList changedFiles = default(IList), OutputParameters outputParameters = default(OutputParameters)) 25 | { 26 | Success = success; 27 | ErrorMessage = errorMessage; 28 | ConsoleOutput = consoleOutput; 29 | ChangedFiles = changedFiles; 30 | OutputParameters = outputParameters; 31 | } 32 | 33 | /// 34 | /// Boolean flag indicating the success status of web service 35 | /// execution. 36 | /// 37 | [JsonProperty(PropertyName = "success")] 38 | public bool? Success { get; set; } 39 | 40 | /// 41 | /// Error messages if any occurred during the web service execution. 42 | /// 43 | [JsonProperty(PropertyName = "errorMessage")] 44 | public string ErrorMessage { get; set; } 45 | 46 | /// 47 | /// Console output from the web service execution. 48 | /// 49 | [JsonProperty(PropertyName = "consoleOutput")] 50 | public string ConsoleOutput { get; set; } 51 | 52 | /// 53 | /// The filenames of the files modified during the web service 54 | /// execution. 
55 | /// 56 | [JsonProperty(PropertyName = "changedFiles")] 57 | public IList ChangedFiles { get; set; } 58 | 59 | /// 60 | /// 61 | [JsonProperty(PropertyName = "outputParameters")] 62 | public OutputParameters OutputParameters { get; set; } 63 | 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/BatchWebServiceResult.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using System; 8 | using System.Linq; 9 | using System.Collections.Generic; 10 | using Newtonsoft.Json; 11 | using Microsoft.Rest; 12 | using Microsoft.Rest.Serialization; 13 | 14 | public partial class BatchWebServiceResult 15 | { 16 | /// 17 | /// Initializes a new instance of the BatchWebServiceResult class. 18 | /// 19 | public BatchWebServiceResult() { } 20 | 21 | /// 22 | /// Initializes a new instance of the BatchWebServiceResult class. 23 | /// 24 | public BatchWebServiceResult(string state = default(string), int? completedItemCount = default(int?), int? totalItemCount = default(int?), int? parallelCount = default(int?), IList batchExecutionResults = default(IList)) 25 | { 26 | State = state; 27 | CompletedItemCount = completedItemCount; 28 | TotalItemCount = totalItemCount; 29 | ParallelCount = parallelCount; 30 | BatchExecutionResults = batchExecutionResults; 31 | } 32 | 33 | /// 34 | /// State of the execution. Can be of the following values: 35 | /// - Pending: The batch execution was submitted but is not yet 36 | /// scheduled. 37 | /// Ready: The batch execution was submitted and can be executed. 38 | /// InProgress: The batch execution is currently being processed. 
39 | /// Complete: The batch execution has been completed. Possible values 40 | /// include: 'pending', 'inProgress', 'ready', 'complete' 41 | /// 42 | [JsonProperty(PropertyName = "state")] 43 | public string State { get; set; } 44 | 45 | /// 46 | /// Number of completed items in this batch operation. 47 | /// 48 | [JsonProperty(PropertyName = "completedItemCount")] 49 | public int? CompletedItemCount { get; set; } 50 | 51 | /// 52 | /// Number of total items in this batch operation. 53 | /// 54 | [JsonProperty(PropertyName = "totalItemCount")] 55 | public int? TotalItemCount { get; set; } 56 | 57 | /// 58 | /// Number of parallel threads that are processing this batch 59 | /// operation. 60 | /// 61 | [JsonProperty(PropertyName = "parallelCount")] 62 | public int? ParallelCount { get; set; } 63 | 64 | /// 65 | /// The responses of the individual executions. 66 | /// 67 | [JsonProperty(PropertyName = "batchExecutionResults")] 68 | public IList BatchExecutionResults { get; set; } 69 | 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/Program.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Globalization; 4 | using System.Net; 5 | using System.Net.Http; 6 | using System.Threading.Tasks; 7 | using Microsoft.Rest; 8 | using Newtonsoft.Json; 9 | using RestSharp.Serialization.Json; 10 | using TestDeployedRModels.Config; 11 | using TestDeployedRModels.Models; 12 | 13 | namespace TestDeployedRModels 14 | { 15 | class Program 16 | { 17 | static void Main(string[] args) 18 | { 19 | MainAsync(args).GetAwaiter().GetResult(); 20 | 21 | Console.ReadKey(); 22 | } 23 | 24 | private static async Task MainAsync(string[] args) 25 | { 26 | try 27 | { 28 | var resultPlumber = await PlumberManualTransmission(120, 2.8); 29 | Console.WriteLine($"Plumber Result: 
{resultPlumber}"); 30 | 31 | var resultMls = await MlsManualTransmission(120, 2.8); 32 | Console.WriteLine($"Machine Learning Server Result: {resultMls}"); 33 | } 34 | catch (Exception e) 35 | { 36 | Console.WriteLine(e); 37 | } 38 | } 39 | 40 | private static async Task PlumberManualTransmission(double hp, double wt) 41 | { 42 | var client = new HttpClient(); 43 | 44 | var values = new Dictionary 45 | { 46 | { "hp", hp.ToString(CultureInfo.InvariantCulture) }, 47 | { "wt", wt.ToString(CultureInfo.InvariantCulture) } 48 | }; 49 | 50 | var content = new FormUrlEncodedContent(values); 51 | 52 | var response = await client.PostAsync( 53 | CarSvcContainerInstance.ManualTransmissionEndpoint, 54 | content); 55 | 56 | var responseString = await response.Content.ReadAsStringAsync(); 57 | 58 | var results = JsonConvert.DeserializeObject(responseString); 59 | 60 | return results.Length > 0 ? results[0] : null; 61 | } 62 | 63 | private static async Task GetAccessToken() 64 | { 65 | var client = new CarsService( 66 | MachineLearningServer.Url, 67 | new BasicAuthenticationCredentials()); 68 | var loginRequest = new LoginRequest( 69 | MachineLearningServer.User, 70 | MachineLearningServer.Password); 71 | var loginResponse = await client.LoginAsync(loginRequest); 72 | return loginResponse.AccessToken; 73 | } 74 | 75 | private static async Task MlsManualTransmission(double hp, double wt) 76 | { 77 | var accessToken = await GetAccessToken(); 78 | 79 | var auth = new TokenCredentials(accessToken); 80 | var client = new CarsService( 81 | MachineLearningServer.Url, auth); 82 | 83 | var result = await client.ManualTransmissionAsync( 84 | new InputParameters(hp, wt)); 85 | if (result.Success.HasValue && result.Success.Value) 86 | return result.OutputParameters.Answer; 87 | 88 | return null; 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- 
/deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/Models/ErrorException.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels.Models 6 | { 7 | using Microsoft.Rest; 8 | using System; 9 | using System.Net.Http; 10 | using System.Runtime.Serialization; 11 | #if !PORTABLE && !DNXCORE50 12 | using System.Security.Permissions; 13 | #endif 14 | 15 | /// 16 | /// Exception thrown for an invalid response with Error information. 17 | /// 18 | #if !PORTABLE && !DNXCORE50 19 | [Serializable] 20 | #endif 21 | public class ErrorException : RestException 22 | { 23 | /// 24 | /// Gets information about the associated HTTP request. 25 | /// 26 | public HttpRequestMessageWrapper Request { get; set; } 27 | 28 | /// 29 | /// Gets information about the associated HTTP response. 30 | /// 31 | public HttpResponseMessageWrapper Response { get; set; } 32 | 33 | /// 34 | /// Gets or sets the body object. 35 | /// 36 | public Error Body { get; set; } 37 | 38 | /// 39 | /// Initializes a new instance of the ErrorException class. 40 | /// 41 | public ErrorException() 42 | { 43 | } 44 | 45 | /// 46 | /// Initializes a new instance of the ErrorException class. 47 | /// 48 | /// The exception message. 49 | public ErrorException(string message) 50 | : this(message, null) 51 | { 52 | } 53 | 54 | /// 55 | /// Initializes a new instance of the ErrorException class. 56 | /// 57 | /// The exception message. 58 | /// Inner exception. 59 | public ErrorException(string message, Exception innerException) 60 | : base(message, innerException) 61 | { 62 | } 63 | 64 | #if !PORTABLE && !DNXCORE50 65 | /// 66 | /// Initializes a new instance of the ErrorException class. 67 | /// 68 | /// Serialization info. 69 | /// Streaming context. 
70 | protected ErrorException(SerializationInfo info, StreamingContext context) 71 | : base(info, context) 72 | { 73 | } 74 | 75 | /// 76 | /// Serializes content of the exception. 77 | /// 78 | /// Serialization info. 79 | /// Streaming context. 80 | [SecurityPermission(SecurityAction.Demand, SerializationFormatter = true)] 81 | public override void GetObjectData(SerializationInfo info, StreamingContext context) 82 | { 83 | base.GetObjectData(info, context); 84 | if (info == null) 85 | { 86 | throw new ArgumentNullException("info"); 87 | } 88 | 89 | info.AddValue("Request", Request); 90 | info.AddValue("Response", Response); 91 | info.AddValue("Body", Body); 92 | } 93 | #endif 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /deploy-r-models/R/02a-mmls-deploy.R: -------------------------------------------------------------------------------- 1 | ########################################################## 2 | # Load & Test a Logistic Regression Model # 3 | ########################################################## 4 | 5 | # Use logistic regression equation of vehicle transmission 6 | # in the data set mtcars to estimate the probability of 7 | # a vehicle being fitted with a manual transmission 8 | # based on horsepower (hp) and weight (wt) 9 | 10 | # If on R Server 9.0, load mrsdeploy package now 11 | library(mrsdeploy) 12 | 13 | # Create glm model with `mtcars` dataset 14 | carsModel <- readRDS("cars-model.rds") 15 | 16 | # Produce a prediction function that can use the model 17 | manualTransmission <- function(hp, wt) { 18 | newdata <- data.frame(hp = hp, wt = wt) 19 | predict(carsModel, newdata, type = "response") 20 | } 21 | 22 | # test function locally by printing results 23 | print(manualTransmission(120, 2.8)) # 0.6418125 24 | 25 | ########################################################## 26 | # Log into Server # 27 | ########################################################## 28 | 29 | mlsvr_connection <- 
function() { 30 | path <- "msftmlsvr-connection.json" 31 | if (!file.exists(path)) { 32 | stop("Can't find secret file: '", path, "'") 33 | } 34 | 35 | jsonlite::read_json(path) 36 | } 37 | 38 | # Use `remoteLogin` to authenticate with Server using 39 | # the local admin account. Use session = false so no 40 | # remote R session started 41 | remoteLogin(mlsvr_connection()$endpoint, 42 | username = mlsvr_connection()$username, 43 | password = mlsvr_connection()$password, 44 | session = FALSE) 45 | 46 | ########################################################## 47 | # Publish Model as a Service # 48 | ########################################################## 49 | 50 | # Generate a unique serviceName for demos 51 | # and assign to variable serviceName 52 | serviceName <- "carsService" 53 | 54 | # Publish as service using publishService() function from 55 | # mrsdeploy package. Name service "mtService" and provide 56 | # unique version number. Assign service to the variable `api` 57 | api <- publishService( 58 | serviceName, 59 | code = manualTransmission, 60 | model = carsModel, 61 | inputs = list(hp = "numeric", wt = "numeric"), 62 | outputs = list(answer = "numeric"), 63 | v = "v1.0.0" 64 | ) 65 | 66 | ########################################################## 67 | # Consume Service in R # 68 | ########################################################## 69 | 70 | # Print capabilities that define the service holdings: service 71 | # name, version, descriptions, inputs, outputs, and the 72 | # name of the function to be consumed 73 | print(api$capabilities()) 74 | 75 | # Consume service by calling function, `manualTransmission` 76 | # contained in this service 77 | result <- api$manualTransmission(120, 2.8) 78 | 79 | # Print response output named `answer` 80 | print(result$output("answer")) # 0.6418125 81 | 82 | ########################################################## 83 | # Get Service-specific Swagger File in R # 84 | 
########################################################## 85 | 86 | # During this authenticated session, download the 87 | # Swagger-based JSON file that defines this service 88 | swagger <- api$swagger() 89 | cat(swagger, file = "mlserver-swagger.json", append = FALSE) 90 | 91 | # Now share this Swagger-based JSON so others can consume it 92 | listServices() 93 | #deleteService(serviceName, "v1.0.0") 94 | -------------------------------------------------------------------------------- /deploy-r-models/R/02b-deploy-aci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | IFS=$'\n\t' 4 | 5 | # -e: immediately exit if any command has a non-zero exit status 6 | # -o: prevents errors in a pipeline from being masked 7 | # IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@) 8 | 9 | usage() { echo "Usage: $0 -i -g -n -l " 1>&2; exit 1; } 10 | 11 | declare subscriptionId="" 12 | declare resourceGroupName="" 13 | declare deploymentName="car-svc-aci-`date '+%Y-%m-%d-%H-%M-%S'`" 14 | declare resourceGroupLocation="" 15 | declare aciDnsNameLabel="" 16 | declare acrName="" 17 | 18 | # Initialize parameters specified from command line 19 | while getopts ":i:g:n:l:r:d:" arg; do 20 | case "${arg}" in 21 | i) 22 | subscriptionId=${OPTARG} 23 | ;; 24 | g) 25 | resourceGroupName=${OPTARG} 26 | ;; 27 | n) 28 | deploymentName=${OPTARG} 29 | ;; 30 | l) 31 | resourceGroupLocation=${OPTARG} 32 | ;; 33 | r) 34 | acrName=${OPTARG} 35 | ;; 36 | d) 37 | aciDnsNameLabel=${OPTARG} 38 | ;; 39 | esac 40 | done 41 | shift $((OPTIND-1)) 42 | 43 | #Prompt for parameters is some required parameters are missing 44 | if [[ -z "$subscriptionId" ]]; then 45 | echo "Your subscription ID can be looked up with the CLI using: az account show --out json " 46 | echo "Enter your subscription ID:" 47 | read subscriptionId 48 | [[ "${subscriptionId:?}" ]] 49 | fi 50 | 51 | if [[ -z 
"$resourceGroupName" ]]; then 52 | echo "This script will look for an existing resource group, otherwise a new one will be created " 53 | echo "You can create new resource groups with the CLI using: az group create " 54 | echo "Enter a resource group name" 55 | read resourceGroupName 56 | [[ "${resourceGroupName:?}" ]] 57 | fi 58 | 59 | if [[ -z "$deploymentName" ]]; then 60 | echo "Enter a name for this deployment:" 61 | read deploymentName 62 | fi 63 | 64 | if [[ -z "$resourceGroupLocation" ]]; then 65 | echo "If creating a *new* resource group, you need to set a location " 66 | echo "You can lookup locations with the CLI using: az account list-locations " 67 | 68 | echo "Enter resource group location:" 69 | read resourceGroupLocation 70 | fi 71 | 72 | if [[ -z "$acrName" ]]; then 73 | echo "Enter a name for the azure container registry:" 74 | read acrName 75 | fi 76 | 77 | if [[ -z "$aciDnsNameLabel" ]]; then 78 | echo "Enter a name for the azure container instance dns entry:" 79 | read aciDnsNameLabel 80 | fi 81 | 82 | #login to azure using your credentials 83 | az account show 1> /dev/null 84 | 85 | if [ $? != 0 ]; 86 | then 87 | az login 88 | fi 89 | 90 | #set the default subscription id 91 | az account set --subscription $subscriptionId 92 | 93 | set +e 94 | 95 | #Check for existing RG 96 | az group show --name $resourceGroupName 1> /dev/null 97 | 98 | if [ $? != 0 ]; then 99 | echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.." 100 | set -e 101 | ( 102 | set -x 103 | az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null 104 | ) 105 | else 106 | echo "Using existing resource group..." 107 | fi 108 | 109 | echo "Deploying Container Registry..." 110 | ( 111 | az acr create -g "$resourceGroupName" -n "$acrName" --sku basic --admin-enabled true 1> /dev/null 112 | ) 113 | 114 | echo "Building Docker Image..." 115 | ( 116 | docker build -t $acrName.azurecr.io/carssvc . 
117 | ) 118 | 119 | echo "Uploading Docker Image..." 120 | ( 121 | docker push $acrName.azurecr.io/carssvc 122 | ) 123 | 124 | #Start deployment 125 | echo "Starting deployment..." 126 | ( 127 | acrPassword=$(az acr credential show -g "$resourceGroupName" -n "$acrName" | jq -r .passwords[0].value) 128 | az container create -g "$resourceGroupName" -n cars-svc-aci --image "$acrName.azurecr.io/carssvc:latest" --ports 8000 --dns-name-label "$aciDnsNameLabel" --registry-username "$acrName" --registry-password "$acrPassword" 129 | ) 130 | 131 | if [ $? == 0 ]; 132 | then 133 | echo "Azure Container Instance has been successfully deployed" 134 | fi 135 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/TestDeployedRModels.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Debug 6 | AnyCPU 7 | {8439A0AA-BD90-4701-9979-90040AC9EFC8} 8 | Exe 9 | TestDeployedRModels 10 | TestDeployedRModels 11 | v4.6.1 12 | 512 13 | true 14 | true 15 | 16 | 17 | AnyCPU 18 | true 19 | full 20 | false 21 | bin\Debug\ 22 | DEBUG;TRACE 23 | prompt 24 | 4 25 | 26 | 27 | AnyCPU 28 | pdbonly 29 | true 30 | bin\Release\ 31 | TRACE 32 | prompt 33 | 4 34 | 35 | 36 | 37 | ..\packages\JsonSubTypes.1.6.0\lib\net46\JsonSubTypes.dll 38 | 39 | 40 | ..\packages\Microsoft.Rest.ClientRuntime.2.3.20\lib\net461\Microsoft.Rest.ClientRuntime.dll 41 | 42 | 43 | ..\packages\Newtonsoft.Json.12.0.3\lib\net45\Newtonsoft.Json.dll 44 | 45 | 46 | ..\packages\RestSharp.106.6.10\lib\net452\RestSharp.dll 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | AutoRestCodeGenerator 86 | 87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /azure-ml-svc-prep-train-py/train/train_Fashion_MNIST.py: 
"""Train a small CNN on Fashion-MNIST and log metrics/artifacts to Azure ML.

Runs both locally (offline Run context) and on Azure ML compute. All files
written under ./outputs are captured as run artifacts by Azure ML.
"""
from __future__ import print_function
import warnings
warnings.filterwarnings('ignore')

import os
import numpy as np
import matplotlib
# Select a non-interactive backend before pyplot is imported: the script is
# submitted to AML compute targets, which presumably have no display server.
# NOTE(review): assumes no interactive plotting is needed — confirm.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.datasets import fashion_mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras import backend as K

import azureml.core
from azureml.core.run import Run

# Environment diagnostics — useful when the run lands on an unknown node.
print("TensorFlow version:", tf.__version__)
print("Using GPU build:", tf.test.is_built_with_cuda())
print("Is GPU available:", tf.test.is_gpu_available())
print("Azure ML SDK version:", azureml.core.VERSION)

# Azure ML automatically uploads everything under ./outputs as run artifacts.
outputs_folder = './outputs'
os.makedirs(outputs_folder, exist_ok=True)

# Handle to the current Azure ML run (an offline run when executed locally).
run = Run.get_context()

# Number of classes - do not change unless the data changes
num_classes = 10

# sizes of batch and # of epochs of data
batch_size = 128
epochs = 24

# input image dimensions
img_rows, img_cols = 28, 28

# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
print('x_train shape:', x_train.shape)
print('x_test shape:', x_test.shape)

# Deal with format issues between different backends. Some put the # of
# channels in the image before the width and height of the image.
if K.image_data_format() == 'channels_first':
    x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
    x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
    x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)

# Type convert and scale the test and training data to [0, 1]
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape (after reshape):', x_train.shape)
print('x_test shape (after reshape):', x_test.shape)

# Log one sample image (inverted grayscale) so the run page shows the data.
img_index = 1
plt.imsave('fashion.png', 1-x_train[img_index][:, :, 0], cmap='gray')
run.log_image('Fashion Sample', path='fashion.png')

print("Before:\n{}".format(y_train[:4]))
# convert class vectors to binary class matrices. One-hot encoding
# 3 => 0 0 0 1 0 0 0 0 0 0 and 1 => 0 1 0 0 0 0 0 0 0 0
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
print("After:\n{}".format(y_train[:4]))  # verify one-hot encoding

# Define the model: two conv/pool stages followed by a dense classifier head.
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
                 activation='relu',
                 input_shape=input_shape))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))

# Take a look at the model summary
model.summary()

# Compile to minimize categorical cross-entropy with the Adam optimizer,
# tracking accuracy. (Earlier comment mentioned Adadelta; the code uses Adam.)
model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adam(),
              metrics=['accuracy'])

# Define callbacks: stop once validation accuracy plateaus for 5 epochs and
# checkpoint weights into ./outputs every epoch.
my_callbacks = [
    EarlyStopping(monitor='val_accuracy', patience=5, mode='max'),
    ModelCheckpoint('./outputs/checkpoint.h5', verbose=1)
]

# Train the model and validate with the test data after each epoch.
# Returns the history of loss and accuracy for each epoch.
hist = model.fit(x_train, y_train,
                 batch_size=batch_size,
                 epochs=epochs,
                 verbose=1,
                 callbacks=my_callbacks,
                 validation_data=(x_test, y_test))
run.log_list('Training Loss', hist.history['loss'])
run.log_list('Training Accuracy', hist.history['accuracy'])
run.log_list('Validation Accuracy', hist.history['val_accuracy'])

# Evaluate the model with the test data to get the scores on "real" data.
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
run.log('loss', score[0])
run.log('accuracy', score[1])

# Plot training vs validation accuracy and attach the figure to the run.
epoch_list = list(range(1, len(hist.history['accuracy']) + 1))  # x axis: [1, 2, ..., # of epochs]
plt.plot(epoch_list, hist.history['accuracy'], epoch_list, hist.history['val_accuracy'])
plt.legend(('Training Accuracy', 'Validation Accuracy'))
run.log_image(name='Accuracy', plot=plt)

# Export the model in three forms: architecture JSON, weights-only HDF5,
# and a single full-model HDF5.
keras_path = os.path.join(outputs_folder, "keras")
os.makedirs(keras_path, exist_ok=True)

print("Exporting Keras models to", keras_path)
with open(os.path.join(keras_path, "model.json"), 'w') as f:
    f.write(model.to_json())
model.save_weights(os.path.join(keras_path, 'model.h5'))

model.save(os.path.join(keras_path, 'full_model.h5'))
-------------------------------------------------------------------------------- /deploy-r-models/deploy-mmls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | IFS=$'\n\t' 4 | 5 | # -e: immediately exit if any command has a non-zero exit status 6 | # -o: prevents errors in a pipeline from being masked 7 | # IFS new value is less likely to cause confusing bugs when looping arrays or arguments (e.g. $@) 8 | 9 | usage() { echo "Usage: $0 -i -g -n -l -v -u -p " 1>&2; exit 1; } 10 | 11 | declare subscriptionId="" 12 | declare resourceGroupName="" 13 | declare deploymentName="msftmlsvr-`date '+%Y-%m-%d-%H-%M-%S'`" 14 | declare resourceGroupLocation="" 15 | declare vmPrefix="" 16 | declare username="" 17 | declare password="" 18 | 19 | # Initialize parameters specified from command line 20 | while getopts ":i:g:n:l:v:u:p:" arg; do 21 | case "${arg}" in 22 | i) 23 | subscriptionId=${OPTARG} 24 | ;; 25 | g) 26 | resourceGroupName=${OPTARG} 27 | ;; 28 | n) 29 | deploymentName=${OPTARG} 30 | ;; 31 | l) 32 | resourceGroupLocation=${OPTARG} 33 | ;; 34 | v) 35 | vmPrefix=${OPTARG} 36 | ;; 37 | u) 38 | username=${OPTARG} 39 | ;; 40 | p) 41 | password=${OPTARG} 42 | ;; 43 | esac 44 | done 45 | shift $((OPTIND-1)) 46 | 47 | # Requirements check: jq 48 | command -v jq >/dev/null 2>&1 || { echo >&2 "jq is required by this script but it's not installed. 
Please check https://stedolan.github.io/jq/download/ for details how to install jq."; exit 1; } 49 | 50 | #Prompt for parameters is some required parameters are missing 51 | if [[ -z "$subscriptionId" ]]; then 52 | echo "Your subscription ID can be looked up with the CLI using: az account show --out json " 53 | echo "Enter your subscription ID:" 54 | read subscriptionId 55 | [[ "${subscriptionId:?}" ]] 56 | fi 57 | 58 | if [[ -z "$resourceGroupName" ]]; then 59 | echo "This script will look for an existing resource group, otherwise a new one will be created " 60 | echo "You can create new resource groups with the CLI using: az group create " 61 | echo "Enter a resource group name: " 62 | read resourceGroupName 63 | [[ "${resourceGroupName:?}" ]] 64 | fi 65 | 66 | if [[ -z "$deploymentName" ]]; then 67 | echo "Enter a name for this deployment:" 68 | read deploymentName 69 | fi 70 | 71 | if [[ -z "$resourceGroupLocation" ]]; then 72 | echo "If creating a *new* resource group, you need to set a location " 73 | echo "You can lookup locations with the CLI using: az account list-locations " 74 | 75 | echo "Enter resource group location:" 76 | read resourceGroupLocation 77 | fi 78 | 79 | if [[ -z "$vmPrefix" ]]; then 80 | echo "Enter a name for the virtual machine:" 81 | read vmPrefix 82 | fi 83 | 84 | if [[ -z "$username" ]]; then 85 | echo "Enter a username for the vm admin:" 86 | read username 87 | fi 88 | 89 | if [[ -z "$password" ]]; then 90 | while : ; do 91 | echo -n "Enter a password for the vm admin:" 92 | read -s password 93 | echo 94 | echo -n "Please repeat the password for the vm admin:" 95 | read -s password_confirm 96 | echo 97 | if [[ "$password" == "$password_confirm" ]]; then 98 | break 99 | else 100 | echo "The passwords do not match. Please retry." 
101 | fi 102 | done 103 | fi 104 | 105 | if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then 106 | echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty" 107 | usage 108 | fi 109 | 110 | #login to azure using your credentials 111 | az account show 1> /dev/null 112 | 113 | if [ $? != 0 ]; 114 | then 115 | az login 116 | fi 117 | 118 | #set the default subscription id 119 | az account set --subscription $subscriptionId 120 | 121 | set +e 122 | 123 | #Check for existing RG 124 | az group show --name $resourceGroupName 1> /dev/null 125 | 126 | if [ $? != 0 ]; then 127 | echo "Resource group with name" $resourceGroupName "could not be found. Creating new resource group.." 128 | set -e 129 | ( 130 | set -x 131 | az group create --name $resourceGroupName --location $resourceGroupLocation 1> /dev/null 132 | ) 133 | else 134 | echo "Using existing resource group..." 135 | fi 136 | 137 | echo "Starting deployment..." 138 | 139 | #Start deployment 140 | echo "Virtual Network..." 141 | ( 142 | set -x 143 | az network vnet create -g "$resourceGroupName" -n "$vmPrefix-vnet" --address-prefix 10.0.0.0/16 \ 144 | --subnet-name default --subnet-prefix 10.0.0.0/24 \ 145 | | jq -r .newVNet.provisioningState 146 | ) 147 | 148 | echo "Network Security Group with 3 Rules..." 
149 | ( 150 | set -x 151 | az network nsg create -g "$resourceGroupName" -n "$vmPrefix-nsg" | jq -r .NewNSG.provisioningState 152 | 153 | az network nsg rule create -g "$resourceGroupName" --nsg-name "$vmPrefix-nsg" -n "MLSvr_WebNode" \ 154 | --priority 1000 --access Allow --protocol Tcp --direction Inbound \ 155 | --destination-address-prefixes '*' --destination-port-ranges 12800 \ 156 | | jq -r .provisioningState 157 | 158 | az network nsg rule create -g "$resourceGroupName" --nsg-name "$vmPrefix-nsg" -n "MLSvr_ComputeNode" \ 159 | --priority 1100 --access Allow --protocol Tcp --direction Inbound \ 160 | --destination-address-prefixes '*' --destination-port-ranges 12805 \ 161 | | jq -r .provisioningState 162 | 163 | az network nsg rule create -g "$resourceGroupName" --nsg-name "$vmPrefix-nsg" -n "MLSvr_RServe" \ 164 | --priority 1200 --access Allow --protocol Tcp --direction Inbound \ 165 | --destination-address-prefixes '*' --destination-port-ranges 9054 \ 166 | | jq -r .provisioningState 167 | ) 168 | 169 | echo "Public IP & NIC..." 170 | ( 171 | set -x 172 | az network public-ip create -g "$resourceGroupName" -n "$vmPrefix-ip" --sku Basic \ 173 | | jq -r .publicIp.provisioningState 174 | az network nic create -g "$resourceGroupName" -n "$vmPrefix-nic" --vnet-name "$vmPrefix-vnet" \ 175 | --subnet default --network-security-group "$vmPrefix-nsg" --public-ip-address "$vmPrefix-ip" \ 176 | | jq -r .NewNIC.provisioningState 177 | ) 178 | 179 | echo "Virtual Machine..." 180 | ( 181 | az vm create -g "$resourceGroupName" -n "$vmPrefix" \ 182 | --image Canonical:UbuntuServer:16.04-LTS:latest --size Standard_D2s_v3 \ 183 | --authentication-type password --admin-username "$username" --admin-password $password \ 184 | --nics "$vmPrefix-nic" --os-disk-name "$vmPrefix-osdisk" --enable-agent "true" 185 | ) 186 | 187 | echo "Microsoft Machine Learning Server..." 
188 | ( 189 | az vm extension set -g "$resourceGroupName" -n "customScript" \ 190 | --vm-name "$vmPrefix" --publisher Microsoft.Azure.Extensions \ 191 | --protected-settings "{\"fileUris\": [\"https://raw.githubusercontent.com/SaschaDittmann/machine-learning-in-practice/master/deploy-r-models/setup/install-mmls-ubuntu.sh\"],\"commandToExecute\": \"./install-mmls-ubuntu.sh -p '$password'\"}" \ 192 | | jq -r .provisioningState 193 | ) 194 | 195 | if [ $? == 0 ]; 196 | then 197 | echo "Microsoft Machine Learning Server has been successfully deployed" 198 | fi 199 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # Tool Runtime Dir 7 | /[Tt]ools/ 8 | 9 | # User-specific files 10 | *.suo 11 | *.user 12 | *.userosscache 13 | *.sln.docstates 14 | 15 | # User-specific files (MonoDevelop/Xamarin Studio) 16 | *.userprefs 17 | 18 | # Build results 19 | [Dd]ebug/ 20 | [Dd]ebugPublic/ 21 | [Rr]elease/ 22 | [Rr]eleases/ 23 | x64/ 24 | x86/ 25 | bld/ 26 | [Bb]in/ 27 | [Oo]bj/ 28 | [Ll]og/ 29 | msbuild.log 30 | msbuild.err 31 | msbuild.wrn 32 | msbuild.binlog 33 | 34 | # Visual Studio 2015/2017 cache/options directory 35 | .vs/ 36 | # Uncomment if you have tasks that create the project's static files in wwwroot 37 | #wwwroot/ 38 | 39 | # Visual Studio 2017 auto generated files 40 | Generated\ Files/ 41 | 42 | # MSTest test Results 43 | [Tt]est[Rr]esult*/ 44 | [Bb]uild[Ll]og.* 45 | 46 | # NUNIT 47 | *.VisualState.xml 48 | TestResult.xml 49 | 50 | # Build Results of an ATL Project 51 | [Dd]ebugPS/ 52 | [Rr]eleasePS/ 53 | dlldata.c 54 | 55 | # Benchmark Results 56 | BenchmarkDotNet.Artifacts/ 57 | 58 | # .NET Core 59 | 
project.lock.json 60 | project.fragment.lock.json 61 | artifacts/ 62 | **/Properties/launchSettings.json 63 | 64 | # StyleCop 65 | StyleCopReport.xml 66 | 67 | # Files built by Visual Studio 68 | *_i.c 69 | *_p.c 70 | *_i.h 71 | *.ilk 72 | *.meta 73 | *.obj 74 | *.pch 75 | *.pdb 76 | *.pgc 77 | *.pgd 78 | *.rsp 79 | *.sbr 80 | *.tlb 81 | *.tli 82 | *.tlh 83 | *.tmp 84 | *.tmp_proj 85 | *.log 86 | *.vspscc 87 | *.vssscc 88 | .builds 89 | *.pidb 90 | *.svclog 91 | *.scc 92 | 93 | # Chutzpah Test files 94 | _Chutzpah* 95 | 96 | # Visual C++ cache files 97 | ipch/ 98 | *.aps 99 | *.ncb 100 | *.opendb 101 | *.opensdf 102 | *.sdf 103 | *.cachefile 104 | *.VC.db 105 | *.VC.VC.opendb 106 | 107 | # Visual Studio profiler 108 | *.psess 109 | *.vsp 110 | *.vspx 111 | *.sap 112 | 113 | # Visual Studio Trace Files 114 | *.e2e 115 | 116 | # TFS 2012 Local Workspace 117 | $tf/ 118 | 119 | # Guidance Automation Toolkit 120 | *.gpState 121 | 122 | # ReSharper is a .NET coding add-in 123 | _ReSharper*/ 124 | *.[Rr]e[Ss]harper 125 | *.DotSettings.user 126 | 127 | # JustCode is a .NET coding add-in 128 | .JustCode 129 | 130 | # TeamCity is a build add-in 131 | _TeamCity* 132 | 133 | # DotCover is a Code Coverage Tool 134 | *.dotCover 135 | 136 | # AxoCover is a Code Coverage Tool 137 | .axoCover/* 138 | !.axoCover/settings.json 139 | 140 | # Visual Studio code coverage results 141 | *.coverage 142 | *.coveragexml 143 | 144 | # NCrunch 145 | _NCrunch_* 146 | .*crunch*.local.xml 147 | nCrunchTemp_* 148 | 149 | # MightyMoose 150 | *.mm.* 151 | AutoTest.Net/ 152 | 153 | # Web workbench (sass) 154 | .sass-cache/ 155 | 156 | # Installshield output folder 157 | [Ee]xpress/ 158 | 159 | # DocProject is a documentation generator add-in 160 | DocProject/buildhelp/ 161 | DocProject/Help/*.HxT 162 | DocProject/Help/*.HxC 163 | DocProject/Help/*.hhc 164 | DocProject/Help/*.hhk 165 | DocProject/Help/*.hhp 166 | DocProject/Help/Html2 167 | DocProject/Help/html 168 | 169 | # Click-Once directory 170 | 
publish/ 171 | 172 | # Publish Web Output 173 | *.[Pp]ublish.xml 174 | *.azurePubxml 175 | # Note: Comment the next line if you want to checkin your web deploy settings, 176 | # but database connection strings (with potential passwords) will be unencrypted 177 | *.pubxml 178 | *.publishproj 179 | 180 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 181 | # checkin your Azure Web App publish settings, but sensitive information contained 182 | # in these scripts will be unencrypted 183 | PublishScripts/ 184 | 185 | # NuGet Packages 186 | *.nupkg 187 | # The packages folder can be ignored because of Package Restore 188 | **/[Pp]ackages/* 189 | # except build/, which is used as an MSBuild target. 190 | !**/[Pp]ackages/build/ 191 | # Uncomment if necessary however generally it will be regenerated when needed 192 | #!**/[Pp]ackages/repositories.config 193 | # NuGet v3's project.json files produces more ignorable files 194 | *.nuget.props 195 | *.nuget.targets 196 | 197 | # Microsoft Azure Build Output 198 | csx/ 199 | *.build.csdef 200 | 201 | # Microsoft Azure Emulator 202 | ecf/ 203 | rcf/ 204 | 205 | # Windows Store app package directories and files 206 | AppPackages/ 207 | BundleArtifacts/ 208 | Package.StoreAssociation.xml 209 | _pkginfo.txt 210 | *.appx 211 | 212 | # Visual Studio cache files 213 | # files ending in .cache can be ignored 214 | *.[Cc]ache 215 | # but keep track of directories ending in .cache 216 | !*.[Cc]ache/ 217 | 218 | # Others 219 | ClientBin/ 220 | ~$* 221 | *~ 222 | *.dbmdl 223 | *.dbproj.schemaview 224 | *.jfm 225 | *.pfx 226 | *.publishsettings 227 | orleans.codegen.cs 228 | 229 | # Including strong name files can present a security risk 230 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 231 | #*.snk 232 | 233 | # Since there are multiple workflows, uncomment next line to ignore bower_components 234 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 235 | 
#bower_components/ 236 | 237 | # RIA/Silverlight projects 238 | Generated_Code/ 239 | 240 | # Backup & report files from converting an old project file 241 | # to a newer Visual Studio version. Backup files are not needed, 242 | # because we have git ;-) 243 | _UpgradeReport_Files/ 244 | Backup*/ 245 | UpgradeLog*.XML 246 | UpgradeLog*.htm 247 | ServiceFabricBackup/ 248 | 249 | # SQL Server files 250 | *.mdf 251 | *.ldf 252 | *.ndf 253 | 254 | # Business Intelligence projects 255 | *.rdl.data 256 | *.bim.layout 257 | *.bim_*.settings 258 | 259 | # Microsoft Fakes 260 | FakesAssemblies/ 261 | 262 | # GhostDoc plugin setting file 263 | *.GhostDoc.xml 264 | 265 | # Node.js Tools for Visual Studio 266 | .ntvs_analysis.dat 267 | node_modules/ 268 | 269 | # TypeScript v1 declaration files 270 | typings/ 271 | 272 | # Visual Studio 6 build log 273 | *.plg 274 | 275 | # Visual Studio 6 workspace options file 276 | *.opt 277 | 278 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
279 | *.vbw 280 | 281 | # Visual Studio LightSwitch build output 282 | **/*.HTMLClient/GeneratedArtifacts 283 | **/*.DesktopClient/GeneratedArtifacts 284 | **/*.DesktopClient/ModelManifest.xml 285 | **/*.Server/GeneratedArtifacts 286 | **/*.Server/ModelManifest.xml 287 | _Pvt_Extensions 288 | 289 | # Paket dependency manager 290 | .paket/paket.exe 291 | paket-files/ 292 | 293 | # FAKE - F# Make 294 | .fake/ 295 | 296 | # JetBrains Rider 297 | .idea/ 298 | *.sln.iml 299 | 300 | # CodeRush 301 | .cr/ 302 | 303 | # Python Tools for Visual Studio (PTVS) 304 | __pycache__/ 305 | *.pyc 306 | 307 | # Cake - Uncomment if you are using it 308 | # tools/** 309 | # !tools/packages.config 310 | 311 | # Tabs Studio 312 | *.tss 313 | 314 | # Telerik's JustMock configuration file 315 | *.jmconfig 316 | 317 | # BizTalk build output 318 | *.btp.cs 319 | *.btm.cs 320 | *.odx.cs 321 | *.xsd.cs 322 | 323 | # OpenCover UI analysis results 324 | OpenCover/ 325 | 326 | # Azure Stream Analytics local run output 327 | ASALocalRun/ 328 | 329 | # MSBuild Binary and Structured Log 330 | *.binlog 331 | # Ignore external test datasets. 
332 | /test/data/external/ 333 | 334 | # misc 335 | .ionide 336 | .vscode 337 | .secrets 338 | .Rhistory 339 | *.rds 340 | *swagger.json 341 | -------------------------------------------------------------------------------- /azure-ml-svc-prep-train-py/03-Fashion-MNIST-AzureMLCompute.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Fashion MNIST\n", 8 | "\n", 9 | "## Validate Azure ML SDK installation and get version number for debugging purposes" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "# Check core SDK version number\n", 19 | "import azureml.core\n", 20 | "print(\"SDK version:\", azureml.core.VERSION)" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "## Initialize Workspace\n", 28 | "Initialize a workspace object from persisted configuration." 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": null, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "# Initialize Workspace\n", 38 | "from azureml.core import Workspace\n", 39 | "\n", 40 | "ws = Workspace.from_config()\n", 41 | "print(\"Resource group: \", ws.resource_group)\n", 42 | "print(\"Location: \", ws.location)\n", 43 | "print(\"Workspace name: \", ws.name)" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "## Create a project directory\n", 51 | "Create a directory that will contain all the necessary code from your local machine that you will need access to on the remote resource. This includes the training script, and any additional files your training script depends on." 
52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "import os\n", 61 | "import shutil\n", 62 | "\n", 63 | "project_folder = './fashion_mnist_amlcompute'\n", 64 | "os.makedirs(project_folder, exist_ok=True)\n", 65 | "\n", 66 | "train_folder = os.path.join(project_folder, 'train')\n", 67 | "os.makedirs(train_folder, exist_ok=True)\n", 68 | "\n", 69 | "shutil.copy('./train/train_Fashion_MNIST.py', train_folder)" 70 | ] 71 | }, 72 | { 73 | "cell_type": "markdown", 74 | "metadata": {}, 75 | "source": [ 76 | "## Create An Experiment\n", 77 | "**Experiment** is a logical container in an Azure ML Workspace. It hosts run records which can include run metrics and output artifacts from your experiments." 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "from azureml.core import Experiment\n", 87 | "experiment_name = 'fashion-mnist'\n", 88 | "experiment = Experiment(workspace = ws, name = experiment_name)" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "## Create Azure ML Compute cluster (GPU-enabled) as a compute target" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "metadata": {}, 102 | "outputs": [], 103 | "source": [ 104 | "from azureml.core.compute import AmlCompute\n", 105 | "from azureml.core.compute_target import ComputeTargetException\n", 106 | "\n", 107 | "compute_target_name = 'gpu-cluster'\n", 108 | "\n", 109 | "try:\n", 110 | " aml_compute = AmlCompute(workspace=ws, name=compute_target_name)\n", 111 | " print('found existing:', aml_compute.name)\n", 112 | "except ComputeTargetException:\n", 113 | " print('creating new.')\n", 114 | " aml_config = AmlCompute.provisioning_configuration(\n", 115 | " vm_size=\"Standard_NC6\",\n", 116 | " vm_priority=\"dedicated\",\n", 117 | " min_nodes = 0,\n", 118 | " 
max_nodes = 4,\n", 119 | " idle_seconds_before_scaledown=300\n", 120 | " )\n", 121 | " aml_compute = AmlCompute.create(\n", 122 | " ws, \n", 123 | " name=compute_target_name, \n", 124 | " provisioning_configuration=aml_config\n", 125 | " )\n", 126 | " aml_compute.wait_for_completion(show_output=True)" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "## Create a TensorFlow estimator\n", 134 | "The AML SDK's TensorFlow estimator enables you to easily submit TensorFlow training jobs for both single-node and distributed runs. For more information on the TensorFlow estimator, refer [here](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-train-tensorflow)." 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "from azureml.train.dnn import TensorFlow\n", 144 | "\n", 145 | "estimator = TensorFlow(source_directory=train_folder,\n", 146 | " compute_target=aml_compute,\n", 147 | " entry_script='train_Fashion_MNIST.py',\n", 148 | " node_count=1,\n", 149 | " conda_packages=['matplotlib=3.1.2'],\n", 150 | " framework_version='2.0',\n", 151 | " use_gpu=True)" 152 | ] 153 | }, 154 | { 155 | "cell_type": "markdown", 156 | "metadata": {}, 157 | "source": [ 158 | "## Submit the Experiment\n", 159 | "Finally, run the training job on Azure ML Compute" 160 | ] 161 | }, 162 | { 163 | "cell_type": "code", 164 | "execution_count": null, 165 | "metadata": {}, 166 | "outputs": [], 167 | "source": [ 168 | "run = experiment.submit(estimator)\n", 169 | "run.tag(\"Description\",\"AML Compute trained Fashion MNIST model\")" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": null, 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "run" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": null, 184 | "metadata": { 185 | "scrolled": true 186 | }, 187 | "outputs": 
[], 188 | "source": [ 189 | "run.wait_for_completion(show_output=True)" 190 | ] 191 | }, 192 | { 193 | "cell_type": "markdown", 194 | "metadata": {}, 195 | "source": [ 196 | "## Show Metrics" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": {}, 203 | "outputs": [], 204 | "source": [ 205 | "# get all metrics logged in the run\n", 206 | "run.get_metrics()\n", 207 | "metrics = run.get_metrics()\n", 208 | "\n", 209 | "import numpy as np\n", 210 | "print('loss is {0:.2f}, and accuracy is {1:0.2f}'.format(\n", 211 | " metrics['loss'], \n", 212 | " metrics['accuracy']\n", 213 | "))" 214 | ] 215 | }, 216 | { 217 | "cell_type": "code", 218 | "execution_count": null, 219 | "metadata": {}, 220 | "outputs": [], 221 | "source": [ 222 | "# Plot data to see relationships in training and validation data\n", 223 | "import numpy as np\n", 224 | "import matplotlib.pyplot as plt\n", 225 | "epoch_list = list(range(1, len(metrics['Training Accuracy']) + 1)) # values for x axis [1, 2, ..., # of epochs]\n", 226 | "plt.plot(epoch_list, metrics['Training Accuracy'], epoch_list, metrics['Validation Accuracy'])\n", 227 | "plt.legend(('Training Accuracy', 'Validation Accuracy'))\n", 228 | "plt.show()" 229 | ] 230 | }, 231 | { 232 | "cell_type": "markdown", 233 | "metadata": {}, 234 | "source": [ 235 | "## Download Model" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": null, 241 | "metadata": {}, 242 | "outputs": [], 243 | "source": [ 244 | "# show all the files stored within the run record\n", 245 | "run.get_file_names()" 246 | ] 247 | }, 248 | { 249 | "cell_type": "code", 250 | "execution_count": null, 251 | "metadata": {}, 252 | "outputs": [], 253 | "source": [ 254 | "import os\n", 255 | "\n", 256 | "outputs_path = os.path.join(project_folder, \"outputs\")\n", 257 | "os.makedirs(outputs_path, exist_ok=True)\n", 258 | "\n", 259 | "for filename in run.get_file_names():\n", 260 | " if 
filename.startswith('outputs'):\n", 261 | " path = os.path.join(project_folder, filename)\n", 262 | " print(\"Downloading \" + filename)\n", 263 | " run.download_file(filename, output_file_path=outputs_path)" 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": null, 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [] 272 | } 273 | ], 274 | "metadata": { 275 | "kernelspec": { 276 | "display_name": "Python 3", 277 | "language": "python", 278 | "name": "python3" 279 | }, 280 | "language_info": { 281 | "codemirror_mode": { 282 | "name": "ipython", 283 | "version": 3 284 | }, 285 | "file_extension": ".py", 286 | "mimetype": "text/x-python", 287 | "name": "python", 288 | "nbconvert_exporter": "python", 289 | "pygments_lexer": "ipython3", 290 | "version": "3.6.8" 291 | } 292 | }, 293 | "nbformat": 4, 294 | "nbformat_minor": 2 295 | } 296 | -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/ICarsService.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 4 | 5 | namespace TestDeployedRModels 6 | { 7 | using System; 8 | using System.Collections.Generic; 9 | using System.Net.Http; 10 | using System.Threading; 11 | using System.Threading.Tasks; 12 | using Newtonsoft.Json; 13 | using Microsoft.Rest; 14 | using Models; 15 | 16 | /// 17 | /// 18 | public partial interface ICarsService : IDisposable 19 | { 20 | /// 21 | /// The base URI of the service. 22 | /// 23 | Uri BaseUri { get; set; } 24 | 25 | /// 26 | /// Gets or sets json serialization settings. 27 | /// 28 | JsonSerializerSettings SerializationSettings { get; } 29 | 30 | /// 31 | /// Gets or sets json deserialization settings. 
32 | /// 33 | JsonSerializerSettings DeserializationSettings { get; } 34 | 35 | /// 36 | /// Subscription credentials which uniquely identify client 37 | /// subscription. 38 | /// 39 | ServiceClientCredentials Credentials { get; } 40 | 41 | 42 | /// 43 | /// Logs the user in 44 | /// 45 | /// 46 | /// 47 | /// 48 | /// The headers that will be added to request. 49 | /// 50 | /// 51 | /// The cancellation token. 52 | /// 53 | Task> LoginWithHttpMessagesAsync(LoginRequest loginRequest, Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 54 | 55 | /// 56 | /// The user renews access token and refresh token 57 | /// 58 | /// 59 | /// 60 | /// 61 | /// The headers that will be added to request. 62 | /// 63 | /// 64 | /// The cancellation token. 65 | /// 66 | Task> RenewTokenWithHttpMessagesAsync(RenewTokenRequest renewTokenRequest, Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 67 | 68 | /// 69 | /// The user revokes a refresh token 70 | /// 71 | /// 72 | /// The refresh token to be revoked 73 | /// 74 | /// 75 | /// The headers that will be added to request. 76 | /// 77 | /// 78 | /// The cancellation token. 79 | /// 80 | Task> RevokeRefreshTokenWithHttpMessagesAsync(string refreshToken, Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 81 | 82 | /// 83 | /// Consume the carsService web service. 84 | /// 85 | /// 86 | /// Input parameters to the web service. 87 | /// 88 | /// 89 | /// The headers that will be added to request. 90 | /// 91 | /// 92 | /// The cancellation token. 93 | /// 94 | Task> ManualTransmissionWithHttpMessagesAsync(InputParameters webServiceParameters, Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 95 | 96 | /// 97 | /// Consume the carsService web service asynchronously. 98 | /// 99 | /// 100 | /// Input parameters to the web service. 
101 | /// 102 | /// 103 | /// Number of threads used to process entries in the batch. Default 104 | /// value is 10. Please make sure not to use too high of a number 105 | /// because it might negatively impact performance. 106 | /// 107 | /// 108 | /// The headers that will be added to request. 109 | /// 110 | /// 111 | /// The cancellation token. 112 | /// 113 | Task> StartBatchExecutionWithHttpMessagesAsync(IList batchWebServiceParameters, int? parallelCount = default(int?), Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 114 | 115 | /// 116 | /// Gets all batch executions for carsService. 117 | /// 118 | /// 119 | /// The headers that will be added to request. 120 | /// 121 | /// 122 | /// The cancellation token. 123 | /// 124 | Task>> GetBatchExecutionsWithHttpMessagesAsync(Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 125 | 126 | /// 127 | /// Gets all batch executions for carsService. 128 | /// 129 | /// 130 | /// Execution id of the execution 131 | /// 132 | /// 133 | /// Returns the already processed results of the batch execution even 134 | /// if it hasn't been fully completed. 135 | /// 136 | /// 137 | /// The headers that will be added to request. 138 | /// 139 | /// 140 | /// The cancellation token. 141 | /// 142 | Task> GetBatchExecutionStatusWithHttpMessagesAsync(string executionId, bool? showPartialResults = default(bool?), Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 143 | 144 | /// 145 | /// Cancels and deletes all batch executions for carsService. 146 | /// 147 | /// 148 | /// Execution id of the execution. 149 | /// 150 | /// 151 | /// The headers that will be added to request. 152 | /// 153 | /// 154 | /// The cancellation token. 
155 | /// 156 | Task>> CancelAndDeleteBatchExecutionWithHttpMessagesAsync(string executionId, Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 157 | 158 | /// 159 | /// Gets all files from an individual execution in carsService. 160 | /// 161 | /// 162 | /// Execution id of the execution 163 | /// 164 | /// 165 | /// Index of the execution in the batch. 166 | /// 167 | /// 168 | /// The headers that will be added to request. 169 | /// 170 | /// 171 | /// The cancellation token. 172 | /// 173 | Task>> GetBatchExecutionFilesWithHttpMessagesAsync(string executionId, int index, Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 174 | 175 | /// 176 | /// Gets a specific file from an execution in carsService. 177 | /// 178 | /// 179 | /// Execution id of the execution 180 | /// 181 | /// 182 | /// Index of the execution in the batch. 183 | /// 184 | /// 185 | /// Name of the file to be returned. 186 | /// 187 | /// 188 | /// The headers that will be added to request. 189 | /// 190 | /// 191 | /// The cancellation token. 
192 | /// 193 | Task> GetBatchExecutionFileWithHttpMessagesAsync(string executionId, int index, string fileName, Dictionary> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); 194 | 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /azure-ml-svc-prep-train-py/01-Fashion-MNIST-Local.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Fashion MNIST" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Import libraries" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "from __future__ import print_function\n", 24 | "import warnings\n", 25 | "warnings.filterwarnings('ignore')" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "import os\n", 35 | "import numpy as np\n", 36 | "from functools import partial\n", 37 | "import tensorflow as tf\n", 38 | "from tensorflow import keras\n", 39 | "from tensorflow.keras.datasets import fashion_mnist\n", 40 | "from tensorflow.keras.models import Sequential\n", 41 | "from tensorflow.keras.layers import Dense, Dropout, Flatten\n", 42 | "from tensorflow.keras.layers import Conv2D, MaxPooling2D\n", 43 | "from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint\n", 44 | "from tensorflow.keras import backend as K\n", 45 | "\n", 46 | "print(\"TensorFlow version:\", tf.__version__)\n", 47 | "print(\"Using GPU build:\", tf.test.is_built_with_cuda())\n", 48 | "print(\"Is GPU available:\", tf.test.is_gpu_available())" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "## Create a project directory\n", 56 | "Create a directory that will 
contain all the output from this experiment." 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "project_folder = './fashion_mnist_local'\n", 66 | "os.makedirs(project_folder, exist_ok=True)" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "metadata": {}, 72 | "source": [ 73 | "## Download and prepare dataset" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "metadata": {}, 80 | "outputs": [], 81 | "source": [ 82 | "# Number of classes - do not change unless the data changes\n", 83 | "num_classes = 10\n", 84 | "\n", 85 | "# sizes of batch and # of epochs of data\n", 86 | "batch_size = 128\n", 87 | "epochs = 24\n", 88 | "\n", 89 | "# input image dimensions\n", 90 | "img_rows, img_cols = 28, 28\n", 91 | "\n", 92 | "# the data, shuffled and split between train and test sets\n", 93 | "(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()\n", 94 | "print('x_train shape:', x_train.shape)\n", 95 | "print('x_test shape:', x_test.shape)\n", 96 | "\n", 97 | "# Deal with format issues between different backends. 
Some put the # of channels in the image before the width and height of image.\n", 98 | "if K.image_data_format() == 'channels_first':\n", 99 | " x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n", 100 | " x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n", 101 | " input_shape = (1, img_rows, img_cols)\n", 102 | "else:\n", 103 | " x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n", 104 | " x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n", 105 | " input_shape = (img_rows, img_cols, 1)\n", 106 | "\n", 107 | "# Type convert and scale the test and training data\n", 108 | "x_train = x_train.astype('float32')\n", 109 | "x_test = x_test.astype('float32')\n", 110 | "x_train /= 255\n", 111 | "x_test /= 255\n", 112 | "print('x_train shape (after reshape):', x_train.shape)\n", 113 | "print('x_test shape (after reshape):', x_test.shape)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "import numpy as np\n", 123 | "import matplotlib.pyplot as plt\n", 124 | "\n", 125 | "img_index = 1\n", 126 | "plt.imshow(1-x_train[img_index][:, :, 0], cmap='gray')" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "# Label Description \n", 136 | "label_dict = {\n", 137 | " 0: 'T-shirt/top',\n", 138 | " 1: 'Trouser',\n", 139 | " 2: 'Pullover',\n", 140 | " 3: 'Dress',\n", 141 | " 4: 'Coat',\n", 142 | " 5: 'Sandal',\n", 143 | " 6: 'Shirt',\n", 144 | " 7: 'Sneaker',\n", 145 | " 8: 'Bag',\n", 146 | " 9: 'Ankle boot'\n", 147 | "}" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "print(\"Before:\\n{}\".format(y_train[:4]))\n", 157 | "# convert class vectors to binary class matrices. 
One-hot encoding\n", 158 | "# 3 => 0 0 0 1 0 0 0 0 0 0 and 1 => 0 1 0 0 0 0 0 0 0 0 \n", 159 | "y_train = keras.utils.to_categorical(y_train, num_classes)\n", 160 | "y_test = keras.utils.to_categorical(y_test, num_classes)\n", 161 | "print(\"After:\\n{}\".format(y_train[:4])) # verify one-hot encoding" 162 | ] 163 | }, 164 | { 165 | "cell_type": "markdown", 166 | "metadata": {}, 167 | "source": [ 168 | "## Define the model" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "metadata": {}, 175 | "outputs": [], 176 | "source": [ 177 | "model = Sequential()\n", 178 | "model.add(Conv2D(32, kernel_size=(3, 3),\n", 179 | " activation='relu',\n", 180 | " input_shape=input_shape))\n", 181 | "model.add(MaxPooling2D(pool_size=(2, 2)))\n", 182 | "model.add(Conv2D(64, (3, 3), activation='relu'))\n", 183 | "model.add(MaxPooling2D(pool_size=(2, 2)))\n", 184 | "model.add(Dropout(0.25))\n", 185 | "model.add(Flatten())\n", 186 | "model.add(Dense(128, activation='relu'))\n", 187 | "model.add(Dropout(0.5))\n", 188 | "model.add(Dense(num_classes, activation='softmax'))\n", 189 | "\n", 190 | "# Take a look at the model summary\n", 191 | "model.summary()" 192 | ] 193 | }, 194 | { 195 | "cell_type": "code", 196 | "execution_count": null, 197 | "metadata": {}, 198 | "outputs": [], 199 | "source": [ 200 | "# compile to minimize categorical cross-entropy loss, use the Adam optimizer, and track accuracy\n", 201 | "model.compile(loss=keras.losses.categorical_crossentropy,\n", 202 | " optimizer=keras.optimizers.Adam(),\n", 203 | " metrics=['accuracy'])" 204 | ] 205 | }, 206 | { 207 | "cell_type": "markdown", 208 | "metadata": {}, 209 | "source": [ 210 | "## Train model" 211 | ] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "execution_count": null, 216 | "metadata": {}, 217 | "outputs": [], 218 | "source": [ 219 | "# Define early stopping callback\n", 220 | "my_callbacks = [\n", 221 | " EarlyStopping(monitor='val_accuracy', patience=5, 
mode='max'),\n", 222 | " ModelCheckpoint(os.path.join(project_folder, \"checkpoint.h5\"), verbose=1)\n", 223 | "]" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": {}, 230 | "outputs": [], 231 | "source": [ 232 | "# Train the model and test/validate the model with the test data after each cycle (epoch) through the training data\n", 233 | "# Return history of loss and accuracy for each epoch\n", 234 | "hist = model.fit(x_train, y_train,\n", 235 | " batch_size=batch_size,\n", 236 | " epochs=epochs,\n", 237 | " verbose=1,\n", 238 | " callbacks=my_callbacks,\n", 239 | " validation_data=(x_test, y_test))" 240 | ] 241 | }, 242 | { 243 | "cell_type": "code", 244 | "execution_count": null, 245 | "metadata": {}, 246 | "outputs": [], 247 | "source": [ 248 | "# Evaluate the model with the test data to get the scores on \"real\" data.\n", 249 | "score = model.evaluate(x_test, y_test, verbose=0)\n", 250 | "print('Test loss:', score[0])\n", 251 | "print('Test accuracy:', score[1])" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": null, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "# Plot data to see relationships in training and validation data\n", 261 | "import numpy as np\n", 262 | "import matplotlib.pyplot as plt\n", 263 | "epoch_list = list(range(1, len(hist.history['accuracy']) + 1)) # values for x axis [1, 2, ..., # of epochs]\n", 264 | "plt.plot(epoch_list, hist.history['accuracy'], epoch_list, hist.history['val_accuracy'])\n", 265 | "plt.legend(('Training Accuracy', 'Validation Accuracy'))\n", 266 | "plt.show()" 267 | ] 268 | }, 269 | { 270 | "cell_type": "markdown", 271 | "metadata": {}, 272 | "source": [ 273 | "## Look at some predictions" 274 | ] 275 | }, 276 | { 277 | "cell_type": "code", 278 | "execution_count": null, 279 | "metadata": {}, 280 | "outputs": [], 281 | "source": [ 282 | "def run_prediction(idx):\n", 283 | " result = 
np.argmax(model.predict(x_test[idx:idx+1]))\n", 284 | " label = np.argmax(y_test[idx])\n", 285 | " print('Prediction: {} ({})'.format(result, label_dict[result]))\n", 286 | " print('Label: {} ({})'.format(label, label_dict[label]))\n", 287 | " #plt.imshow(1-x_test[idx][:, :, 0], cmap='gray')" 288 | ] 289 | }, 290 | { 291 | "cell_type": "code", 292 | "execution_count": null, 293 | "metadata": {}, 294 | "outputs": [], 295 | "source": [ 296 | "import random\n", 297 | "\n", 298 | "for _ in range(1,10):\n", 299 | " idx = random.randint(0, 47-1)\n", 300 | " run_prediction(idx)" 301 | ] 302 | }, 303 | { 304 | "cell_type": "markdown", 305 | "metadata": {}, 306 | "source": [ 307 | "## Keras exports" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": null, 313 | "metadata": {}, 314 | "outputs": [], 315 | "source": [ 316 | "keras_path = os.path.join(project_folder, \"keras\")\n", 317 | "os.makedirs(keras_path, exist_ok=True)\n", 318 | "\n", 319 | "with open(os.path.join(keras_path, \"model.json\"), 'w') as f:\n", 320 | " f.write(model.to_json())\n", 321 | "model.save_weights(os.path.join(keras_path, 'model.h5'))\n", 322 | "\n", 323 | "model.save(os.path.join(keras_path, 'full_model.h5'))" 324 | ] 325 | }, 326 | { 327 | "cell_type": "code", 328 | "execution_count": null, 329 | "metadata": {}, 330 | "outputs": [], 331 | "source": [] 332 | } 333 | ], 334 | "metadata": { 335 | "kernelspec": { 336 | "display_name": "Python 3.6 - AzureML", 337 | "language": "python", 338 | "name": "python3-azureml" 339 | }, 340 | "language_info": { 341 | "codemirror_mode": { 342 | "name": "ipython", 343 | "version": 3 344 | }, 345 | "file_extension": ".py", 346 | "mimetype": "text/x-python", 347 | "name": "python", 348 | "nbconvert_exporter": "python", 349 | "pygments_lexer": "ipython3", 350 | "version": "3.6.9" 351 | }, 352 | "nteract": { 353 | "version": "nteract-front-end@1.0.0" 354 | } 355 | }, 356 | "nbformat": 4, 357 | "nbformat_minor": 2 358 | } 359 | 
-------------------------------------------------------------------------------- /azure-ml-svc-prep-train-py/02-Fashion-MNIST-LogToAzure.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Fashion MNIST" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Import libraries" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "from __future__ import print_function\n", 24 | "import warnings\n", 25 | "warnings.filterwarnings('ignore')" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "import os\n", 35 | "import numpy as np\n", 36 | "from functools import partial\n", 37 | "import tensorflow as tf\n", 38 | "from tensorflow import keras\n", 39 | "from tensorflow.keras.datasets import fashion_mnist\n", 40 | "from tensorflow.keras.models import Sequential\n", 41 | "from tensorflow.keras.layers import Dense, Dropout, Flatten\n", 42 | "from tensorflow.keras.layers import Conv2D, MaxPooling2D\n", 43 | "from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint\n", 44 | "from tensorflow.keras import backend as K\n", 45 | "\n", 46 | "print(\"TensorFlow version:\", tf.__version__)\n", 47 | "print(\"Using GPU build:\", tf.test.is_built_with_cuda())\n", 48 | "print(\"Is GPU available:\", tf.test.is_gpu_available())" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "## Connect to Azure Machine Learning Services" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "from azureml.core import Experiment, Run, Workspace\n", 65 | "import azureml.core\n", 66 | "\n", 67 | "# Check core SDK 
version number\n", 68 | "print(\"SDK version:\", azureml.core.VERSION)" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "ws = Workspace.from_config()\n", 78 | "print('Workspace name: ' + ws.name, \n", 79 | " 'Azure region: ' + ws.location, \n", 80 | " 'Resource group: ' + ws.resource_group, sep='\\n')" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "metadata": {}, 86 | "source": [ 87 | "## Create a project directory\n", 88 | "Create a directory that will contain all the output from this experiment." 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "metadata": {}, 95 | "outputs": [], 96 | "source": [ 97 | "project_folder = './fashion_mnist_logtoazure'\n", 98 | "os.makedirs(project_folder, exist_ok=True)" 99 | ] 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "metadata": {}, 104 | "source": [ 105 | "## Download and prepare dataset" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "metadata": {}, 112 | "outputs": [], 113 | "source": [ 114 | "# Number of classes - do not change unless the data changes\n", 115 | "num_classes = 10\n", 116 | "\n", 117 | "# sizes of batch and # of epochs of data\n", 118 | "batch_size = 128\n", 119 | "epochs = 24\n", 120 | "\n", 121 | "# input image dimensions\n", 122 | "img_rows, img_cols = 28, 28\n", 123 | "\n", 124 | "# the data, shuffled and split between train and test sets\n", 125 | "(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()\n", 126 | "print('x_train shape:', x_train.shape)\n", 127 | "print('x_test shape:', x_test.shape)\n", 128 | "\n", 129 | "# Deal with format issues between different backends. 
Some put the # of channels in the image before the width and height of image.\n", 130 | "if K.image_data_format() == 'channels_first':\n", 131 | " x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n", 132 | " x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n", 133 | " input_shape = (1, img_rows, img_cols)\n", 134 | "else:\n", 135 | " x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n", 136 | " x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n", 137 | " input_shape = (img_rows, img_cols, 1)\n", 138 | "\n", 139 | "# Type convert and scale the test and training data\n", 140 | "x_train = x_train.astype('float32')\n", 141 | "x_test = x_test.astype('float32')\n", 142 | "x_train /= 255\n", 143 | "x_test /= 255\n", 144 | "print('x_train shape (after reshape):', x_train.shape)\n", 145 | "print('x_test shape (after reshape):', x_test.shape)" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "metadata": {}, 152 | "outputs": [], 153 | "source": [ 154 | "import numpy as np\n", 155 | "import matplotlib.pyplot as plt\n", 156 | "\n", 157 | "img_index = 1\n", 158 | "plt.imshow(1-x_train[img_index][:, :, 0], cmap='gray')" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": null, 164 | "metadata": {}, 165 | "outputs": [], 166 | "source": [ 167 | "# Label Description \n", 168 | "label_dict = {\n", 169 | " 0: 'T-shirt/top',\n", 170 | " 1: 'Trouser',\n", 171 | " 2: 'Pullover',\n", 172 | " 3: 'Dress',\n", 173 | " 4: 'Coat',\n", 174 | " 5: 'Sandal',\n", 175 | " 6: 'Shirt',\n", 176 | " 7: 'Sneaker',\n", 177 | " 8: 'Bag',\n", 178 | " 9: 'Ankle boot'\n", 179 | "}" 180 | ] 181 | }, 182 | { 183 | "cell_type": "code", 184 | "execution_count": null, 185 | "metadata": {}, 186 | "outputs": [], 187 | "source": [ 188 | "print(\"Before:\\n{}\".format(y_train[:4]))\n", 189 | "# convert class vectors to binary class matrices. 
One-hot encoding\n", 190 | "# 3 => 0 0 0 1 0 0 0 0 0 0 and 1 => 0 1 0 0 0 0 0 0 0 0 \n", 191 | "y_train = keras.utils.to_categorical(y_train, num_classes)\n", 192 | "y_test = keras.utils.to_categorical(y_test, num_classes)\n", 193 | "print(\"After:\\n{}\".format(y_train[:4])) # verify one-hot encoding" 194 | ] 195 | }, 196 | { 197 | "cell_type": "markdown", 198 | "metadata": {}, 199 | "source": [ 200 | "## Define the model" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": null, 206 | "metadata": {}, 207 | "outputs": [], 208 | "source": [ 209 | "model = Sequential()\n", 210 | "model.add(Conv2D(32, kernel_size=(3, 3),\n", 211 | " activation='relu',\n", 212 | " input_shape=input_shape))\n", 213 | "model.add(MaxPooling2D(pool_size=(2, 2)))\n", 214 | "model.add(Conv2D(64, (3, 3), activation='relu'))\n", 215 | "model.add(MaxPooling2D(pool_size=(2, 2)))\n", 216 | "model.add(Dropout(0.25))\n", 217 | "model.add(Flatten())\n", 218 | "model.add(Dense(128, activation='relu'))\n", 219 | "model.add(Dropout(0.5))\n", 220 | "model.add(Dense(num_classes, activation='softmax'))\n", 221 | "\n", 222 | "# Take a look at the model summary\n", 223 | "model.summary()" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": {}, 230 | "outputs": [], 231 | "source": [ 232 | "# define compile to minimize categorical loss, use ada delta optimized, and optimize to maximizing accuracy\n", 233 | "model.compile(loss=keras.losses.categorical_crossentropy,\n", 234 | " optimizer=keras.optimizers.Adam(),\n", 235 | " metrics=['accuracy'])" 236 | ] 237 | }, 238 | { 239 | "cell_type": "markdown", 240 | "metadata": {}, 241 | "source": [ 242 | "## Setup Experiment" 243 | ] 244 | }, 245 | { 246 | "cell_type": "code", 247 | "execution_count": null, 248 | "metadata": {}, 249 | "outputs": [], 250 | "source": [ 251 | "experiment_name = 'fashion-mnist'\n", 252 | "experiment = Experiment(workspace=ws, name=experiment_name)" 253 | ] 254 | }, 
255 | { 256 | "cell_type": "code", 257 | "execution_count": null, 258 | "metadata": {}, 259 | "outputs": [], 260 | "source": [ 261 | "experiment" 262 | ] 263 | }, 264 | { 265 | "cell_type": "markdown", 266 | "metadata": {}, 267 | "source": [ 268 | "## Train model" 269 | ] 270 | }, 271 | { 272 | "cell_type": "code", 273 | "execution_count": null, 274 | "metadata": {}, 275 | "outputs": [], 276 | "source": [ 277 | "# Define early stopping callback\n", 278 | "my_callbacks = [\n", 279 | " EarlyStopping(monitor='val_accuracy', patience=5, mode='max'),\n", 280 | " ModelCheckpoint(os.path.join(project_folder, \"checkpoint.h5\"), verbose=1)\n", 281 | "]" 282 | ] 283 | }, 284 | { 285 | "cell_type": "code", 286 | "execution_count": null, 287 | "metadata": {}, 288 | "outputs": [], 289 | "source": [ 290 | "try:\n", 291 | " run = experiment.start_logging()\n", 292 | " run.tag(\"Description\",\"Locally trained Fashion MNIST model\")\n", 293 | "\n", 294 | " # Train the model and test/validate the mode with the test data after each cycle (epoch) through the training data\n", 295 | " # Return history of loss and accuracy for each epoch\n", 296 | " hist = model.fit(x_train, y_train,\n", 297 | " batch_size=batch_size,\n", 298 | " epochs=epochs,\n", 299 | " verbose=1,\n", 300 | " callbacks=my_callbacks,\n", 301 | " validation_data=(x_test, y_test))\n", 302 | " run.log_list('Training Loss', hist.history['loss'])\n", 303 | " run.log_list('Training Accuracy', hist.history['accuracy'])\n", 304 | " run.log_list('Validation Accuracy', hist.history['val_accuracy'])\n", 305 | "\n", 306 | " # Evaluate the model with the test data to get the scores on \"real\" data.\n", 307 | " score = model.evaluate(x_test, y_test, verbose=0)\n", 308 | " print('Test loss:', score[0])\n", 309 | " print('Test accuracy:', score[1])\n", 310 | " run.log('loss', score[0])\n", 311 | " run.log('accuracy', score[1])\n", 312 | "\n", 313 | " # Plot data to see relationships in training and validation data\n", 314 | " 
import numpy as np\n", 315 | " import matplotlib.pyplot as plt\n", 316 | " epoch_list = list(range(1, len(hist.history['accuracy']) + 1)) # values for x axis [1, 2, ..., # of epochs]\n", 317 | " plt.plot(epoch_list, hist.history['accuracy'], epoch_list, hist.history['val_accuracy'])\n", 318 | "\n", 319 | " plt.legend(('Training Accuracy', 'Validation Accuracy'))\n", 320 | " run.log_image(name='Accuracy', plot=plt)\n", 321 | "\n", 322 | " run.complete()\n", 323 | "except Exception as e:\n", 324 | " run.fail()\n", 325 | " print(str(e))" 326 | ] 327 | }, 328 | { 329 | "cell_type": "code", 330 | "execution_count": null, 331 | "metadata": {}, 332 | "outputs": [], 333 | "source": [ 334 | "# Evaluate the model with the test data to get the scores on \"real\" data.\n", 335 | "score = model.evaluate(x_test, y_test, verbose=0)\n", 336 | "print('Test loss:', score[0])\n", 337 | "print('Test accuracy:', score[1])" 338 | ] 339 | }, 340 | { 341 | "cell_type": "code", 342 | "execution_count": null, 343 | "metadata": {}, 344 | "outputs": [], 345 | "source": [ 346 | "# Plot data to see relationships in training and validation data\n", 347 | "import numpy as np\n", 348 | "import matplotlib.pyplot as plt\n", 349 | "epoch_list = list(range(1, len(hist.history['accuracy']) + 1)) # values for x axis [1, 2, ..., # of epochs]\n", 350 | "plt.plot(epoch_list, hist.history['accuracy'], epoch_list, hist.history['val_accuracy'])\n", 351 | "plt.legend(('Training Accuracy', 'Validation Accuracy'))\n", 352 | "plt.show()" 353 | ] 354 | }, 355 | { 356 | "cell_type": "markdown", 357 | "metadata": {}, 358 | "source": [ 359 | "## Look at some predictions" 360 | ] 361 | }, 362 | { 363 | "cell_type": "code", 364 | "execution_count": null, 365 | "metadata": {}, 366 | "outputs": [], 367 | "source": [ 368 | "def run_prediction(idx):\n", 369 | " result = np.argmax(model.predict(x_test[idx:idx+1]))\n", 370 | " label = np.argmax(y_test[idx])\n", 371 | " print('Prediction: {} ({})'.format(result, 
label_dict[result]))\n", 372 | " print('Label: {} ({})'.format(label, label_dict[label]))\n", 373 | " #plt.imshow(1-x_test[idx][:, :, 0], cmap='gray')" 374 | ] 375 | }, 376 | { 377 | "cell_type": "code", 378 | "execution_count": null, 379 | "metadata": {}, 380 | "outputs": [], 381 | "source": [ 382 | "import random\n", 383 | "\n", 384 | "for _ in range(1,10):\n", 385 | " idx = random.randint(0, 47-1)\n", 386 | " run_prediction(idx)" 387 | ] 388 | }, 389 | { 390 | "cell_type": "markdown", 391 | "metadata": {}, 392 | "source": [ 393 | "## Keras exports" 394 | ] 395 | }, 396 | { 397 | "cell_type": "code", 398 | "execution_count": null, 399 | "metadata": {}, 400 | "outputs": [], 401 | "source": [ 402 | "keras_path = os.path.join(project_folder, \"keras\")\n", 403 | "os.makedirs(keras_path, exist_ok=True)\n", 404 | "\n", 405 | "with open(os.path.join(keras_path, \"model.json\"), 'w') as f:\n", 406 | " f.write(model.to_json())\n", 407 | "model.save_weights(os.path.join(keras_path, 'model.h5'))\n", 408 | "\n", 409 | "model.save(os.path.join(keras_path, 'full_model.h5'))" 410 | ] 411 | }, 412 | { 413 | "cell_type": "markdown", 414 | "metadata": {}, 415 | "source": [ 416 | "## Upload model files" 417 | ] 418 | }, 419 | { 420 | "cell_type": "code", 421 | "execution_count": null, 422 | "metadata": {}, 423 | "outputs": [], 424 | "source": [ 425 | "import os\n", 426 | "for root, dirs, files in os.walk(project_folder, topdown=False):\n", 427 | " for filename in files:\n", 428 | " source = os.path.join(root, filename)\n", 429 | " name = source.replace(project_folder, \"./outputs\")\n", 430 | " print(\"Uploading \" + filename)\n", 431 | " run.upload_file(name, source)" 432 | ] 433 | } 434 | ], 435 | "metadata": { 436 | "kernelspec": { 437 | "display_name": "Python 3.6 - AzureML", 438 | "language": "python", 439 | "name": "python3-azureml" 440 | }, 441 | "language_info": { 442 | "codemirror_mode": { 443 | "name": "ipython", 444 | "version": 3 445 | }, 446 | "file_extension": ".py", 
447 | "mimetype": "text/x-python", 448 | "name": "python", 449 | "nbconvert_exporter": "python", 450 | "pygments_lexer": "ipython3", 451 | "version": "3.6.9" 452 | } 453 | }, 454 | "nbformat": 4, 455 | "nbformat_minor": 2 456 | } 457 | -------------------------------------------------------------------------------- /deploy-r-models/SQL/SqlServerMachineLearningServices.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# SQL Server / SQL Database Machine Learning Service\n", 20 | "## Create **Motor Trend Car Road Tests** Table" 21 | ], 22 | "metadata": { 23 | "azdata_cell_guid": "bcabc77a-a2c2-4cc8-8b2d-059b7a167cbb" 24 | } 25 | }, 26 | { 27 | "cell_type": "code", 28 | "source": [ 29 | "DROP TABLE IF EXISTS [dbo].[MotorTrendCarRoadTests]\n", 30 | "GO\n", 31 | "CREATE TABLE [dbo].[MotorTrendCarRoadTests] (\n", 32 | " [mpg] FLOAT NOT NULL -- Miles/(US) gallon\n", 33 | " , [cyl] INT NOT NULL -- Number of cylinders\n", 34 | " , [hp] FLOAT NOT NULL -- Gross horsepower\n", 35 | " , [disp] FLOAT NOT NULL -- Displacement (cu.in.)\n", 36 | " , [drat] FLOAT NOT NULL -- Rear axle ratio\n", 37 | " , [wt] FLOAT NOT NULL -- Weight (1000 lbs)\n", 38 | " , [qsec] FLOAT NOT NULL -- 1/4 mile time\n", 39 | " , [vs] INT NOT NULL -- Engine (0 = V-shaped, 1 = straight)\n", 40 | " , [am] INT NOT NULL -- Transmission (0 = automatic, 1 = manual)\n", 41 | " , [gear] INT NOT NULL -- Number of forward gears\n", 42 | " , [carb] INT NOT NULL -- Number of carburetors\n", 43 | ");" 44 | ], 45 | "metadata": { 46 | "azdata_cell_guid": "da6d1284-aca7-4bc4-b17c-51a9745412e2" 47 | }, 48 | "outputs": [], 49 | "execution_count": 20 50 | }, 51 | { 52 | 
"cell_type": "markdown", 53 | "source": [ 54 | "## Import data from R dataset" 55 | ], 56 | "metadata": { 57 | "azdata_cell_guid": "a0e0b573-7c12-48b2-83bd-31fb527a6736" 58 | } 59 | }, 60 | { 61 | "cell_type": "code", 62 | "source": [ 63 | "INSERT INTO dbo.MotorTrendCarRoadTests\n", 64 | "EXEC sp_execute_external_script\n", 65 | " @language = N'R'\n", 66 | " , @script = N'motor_trend_car_road_tests <- mtcars;'\n", 67 | " , @input_data_1 = N''\n", 68 | " , @output_data_1_name = N'motor_trend_car_road_tests'" 69 | ], 70 | "metadata": { 71 | "azdata_cell_guid": "8e1175e0-fbc3-4d80-9e84-a51c5c8cc165" 72 | }, 73 | "outputs": [], 74 | "execution_count": 21 75 | }, 76 | { 77 | "cell_type": "markdown", 78 | "source": [ 79 | "## Train & Save Model\n", 80 | "### Create the *Training* Stored Procedure \n", 81 | "Creating a stored procedure to train a Logistic Regression Model" 82 | ], 83 | "metadata": { 84 | "azdata_cell_guid": "2f67f8d2-9092-4400-9edd-438424845655" 85 | } 86 | }, 87 | { 88 | "cell_type": "code", 89 | "source": [ 90 | "DROP PROCEDURE IF EXISTS dbo.train_manual_transmission_model_v1;\n", 91 | "GO\n", 92 | "CREATE PROCEDURE dbo.train_manual_transmission_model_v1\n", 93 | "( \n", 94 | " @model_name nvarchar(30) OUTPUT\n", 95 | " , @model_version int OUTPUT\n", 96 | " , @trained_model varbinary(max) OUTPUT\n", 97 | ")\n", 98 | "AS\n", 99 | "BEGIN\n", 100 | " EXEC sp_execute_external_script\n", 101 | " @language = N'R'\n", 102 | " , @script = N'\n", 103 | " lr_model <- glm(formula = am ~ hp + wt, data = MotorTrendCarRoadTests, family = binomial);\n", 104 | "\n", 105 | " trained_model <- as.raw(serialize(lr_model, connection=NULL));\n", 106 | " model_name <- \"manual_transmission_model\";\n", 107 | " model_version <- as.integer(1);\n", 108 | " '\n", 109 | " , @input_data_1 = N'SELECT [hp], [wt], [am] FROM MotorTrendCarRoadTests'\n", 110 | " , @input_data_1_name = N'MotorTrendCarRoadTests'\n", 111 | " , @params = N'@model_name nvarchar(30) OUTPUT, @model_version int 
OUTPUT, @trained_model varbinary(max) OUTPUT'\n", 112 | "\t, @model_name = @model_name OUTPUT\n", 113 | " , @model_version = @model_version OUTPUT\n", 114 | " , @trained_model = @trained_model OUTPUT;\n", 115 | "END;" 116 | ], 117 | "metadata": { 118 | "azdata_cell_guid": "b97d4e31-c2db-4bdc-a16f-835130b50bed" 119 | }, 120 | "outputs": [], 121 | "execution_count": 22 122 | }, 123 | { 124 | "cell_type": "markdown", 125 | "source": [ 126 | "### Using RevoScaleR" 127 | ], 128 | "metadata": { 129 | "azdata_cell_guid": "2f02c6c1-048d-4674-918e-68297073b220" 130 | } 131 | }, 132 | { 133 | "cell_type": "code", 134 | "source": [ 135 | "DROP PROCEDURE IF EXISTS dbo.train_manual_transmission_model_v2;\n", 136 | "GO\n", 137 | "CREATE PROCEDURE dbo.train_manual_transmission_model_v2\n", 138 | "( \n", 139 | " @model_name nvarchar(30) OUTPUT\n", 140 | " , @model_version int OUTPUT\n", 141 | " , @trained_model varbinary(max) OUTPUT\n", 142 | ")\n", 143 | "AS\n", 144 | "BEGIN\n", 145 | " EXEC sp_execute_external_script\n", 146 | " @language = N'R'\n", 147 | " , @script = N'\n", 148 | " require(\"RevoScaleR\");\n", 149 | "\n", 150 | " lr_model <- rxLogit(formula = am ~ hp + wt, data = MotorTrendCarRoadTests);\n", 151 | "\n", 152 | " trained_model <- rxSerializeModel(lr_model, realtimeScoringOnly = TRUE);\n", 153 | " model_name <- \"manual_transmission_model\";\n", 154 | " model_version <- as.integer(2);\n", 155 | " '\n", 156 | " , @input_data_1 = N'SELECT [hp], [wt], [am] FROM MotorTrendCarRoadTests'\n", 157 | " , @input_data_1_name = N'MotorTrendCarRoadTests'\n", 158 | " , @params = N'@model_name nvarchar(30) OUTPUT, @model_version int OUTPUT, @trained_model varbinary(max) OUTPUT'\n", 159 | "\t, @model_name = @model_name OUTPUT\n", 160 | " , @model_version = @model_version OUTPUT\n", 161 | " , @trained_model = @trained_model OUTPUT;\n", 162 | "END;" 163 | ], 164 | "metadata": { 165 | "azdata_cell_guid": "0341afd8-b797-4bcc-a38e-d91f02af8f7d" 166 | }, 167 | "outputs": [], 168 | 
"execution_count": 23 169 | }, 170 | { 171 | "cell_type": "markdown", 172 | "source": [ 173 | "### Create a table for the machine learning models" 174 | ], 175 | "metadata": { 176 | "azdata_cell_guid": "0e736e33-12d5-4748-89e3-74f22e616144" 177 | } 178 | }, 179 | { 180 | "cell_type": "code", 181 | "source": [ 182 | "DROP TABLE IF EXISTS [dbo].[MachineLearningModels]\n", 183 | "GO\n", 184 | "CREATE TABLE [dbo].[MachineLearningModels] (\n", 185 | " model_name nvarchar(30) not null default('default model')\n", 186 | " , model_version int not null default(1)\n", 187 | " , model varbinary(max) not null\n", 188 | " , created_on datetime not null default(getdate())\n", 189 | " , last_updated_on datetime not null default(getdate())\n", 190 | " CONSTRAINT PK_MachineLearningModels PRIMARY KEY (model_name, model_version)\t\n", 191 | ");" 192 | ], 193 | "metadata": { 194 | "azdata_cell_guid": "ad46d379-73b6-40bc-9f69-2e814d31e60e", 195 | "tags": [] 196 | }, 197 | "outputs": [], 198 | "execution_count": 24 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "source": [ 203 | "### Train & Save Regression Model" 204 | ], 205 | "metadata": { 206 | "azdata_cell_guid": "7ff3da54-e027-4a58-b7d1-1a6cc0015b39" 207 | } 208 | }, 209 | { 210 | "cell_type": "code", 211 | "source": [ 212 | "DECLARE @model_name NVARCHAR(30), @model_version INT, @model VARBINARY(MAX);\n", 213 | "\n", 214 | "EXEC dbo.train_manual_transmission_model_v1 @model_name OUTPUT, @model_version OUTPUT, @model OUTPUT;\n", 215 | "\n", 216 | "DELETE\n", 217 | "FROM [dbo].[MachineLearningModels] \n", 218 | "WHERE model_name = @model_name\n", 219 | "AND model_version = @model_version;\n", 220 | "\n", 221 | "INSERT\n", 222 | "INTO [dbo].[MachineLearningModels] \n", 223 | " (model_name, model_version, model)\n", 224 | "VALUES (@model_name, @model_version, @model);" 225 | ], 226 | "metadata": { 227 | "azdata_cell_guid": "20ebda02-03ed-414a-8117-8111f6f211dc", 228 | "tags": [] 229 | }, 230 | "outputs": [], 231 | 
"execution_count": 25 232 | }, 233 | { 234 | "cell_type": "code", 235 | "source": [ 236 | "DECLARE @model_name NVARCHAR(30), @model_version INT, @model VARBINARY(MAX);\n", 237 | "\n", 238 | "EXEC dbo.train_manual_transmission_model_v2 @model_name OUTPUT, @model_version OUTPUT, @model OUTPUT;\n", 239 | "\n", 240 | "DELETE\n", 241 | "FROM [dbo].[MachineLearningModels] \n", 242 | "WHERE model_name = @model_name\n", 243 | "AND model_version = @model_version;\n", 244 | "\n", 245 | "INSERT\n", 246 | "INTO [dbo].[MachineLearningModels] \n", 247 | " (model_name, model_version, model)\n", 248 | "VALUES (@model_name, @model_version, @model);" 249 | ], 250 | "metadata": { 251 | "azdata_cell_guid": "1605c137-c54c-47ac-b582-1b2e8ac19c5c" 252 | }, 253 | "outputs": [], 254 | "execution_count": 26 255 | }, 256 | { 257 | "cell_type": "markdown", 258 | "source": [ 259 | "## Prediction\n", 260 | "### Via Stored Procedure" 261 | ], 262 | "metadata": { 263 | "azdata_cell_guid": "4a7d0a41-f8cf-44a5-9eec-8179ccf3cb5e" 264 | } 265 | }, 266 | { 267 | "cell_type": "code", 268 | "source": [ 269 | "DROP PROCEDURE IF EXISTS dbo.predict_manual_transmission;\n", 270 | "GO\n", 271 | "CREATE PROCEDURE dbo.predict_manual_transmission\n", 272 | "( \n", 273 | " @hp FLOAT\n", 274 | " , @wt FLOAT\n", 275 | " , @am FLOAT OUTPUT\n", 276 | ")\n", 277 | "AS\n", 278 | "BEGIN\n", 279 | " DECLARE @model VARBINARY(MAX) = \n", 280 | " (SELECT TOP(1) model \n", 281 | " FROM [dbo].[MachineLearningModels] \n", 282 | " WHERE model_name = 'manual_transmission_model' \n", 283 | " AND model_version = 1);\n", 284 | "\n", 285 | " EXEC sp_execute_external_script\n", 286 | " @language = N'R'\n", 287 | " , @script = N'\n", 288 | " lr_model <- unserialize(as.raw(trained_model));\n", 289 | " \n", 290 | " newdata <- data.frame(hp = hp, wt = wt);\n", 291 | " am <- predict(lr_model, newdata, type = \"response\");\n", 292 | " ' \n", 293 | " , @params = N'@trained_model varbinary(max), @hp FLOAT, @wt FLOAT, @am FLOAT OUTPUT'\n", 
294 | "\t, @trained_model = @model\n", 295 | " , @hp = @hp\n", 296 | " , @wt = @wt\n", 297 | " , @am = @am OUTPUT;\n", 298 | "END" 299 | ], 300 | "metadata": { 301 | "azdata_cell_guid": "293509ba-5a65-452a-95ae-1751facf4070" 302 | }, 303 | "outputs": [], 304 | "execution_count": 27 305 | }, 306 | { 307 | "cell_type": "code", 308 | "source": [ 309 | "DECLARE @am FLOAT;\n", 310 | "EXEC dbo.predict_manual_transmission 120, 2.8, @am OUTPUT;\n", 311 | "SELECT @am AS ManualTransmissionPropability;" 312 | ], 313 | "metadata": { 314 | "azdata_cell_guid": "1f90bb1b-4c3c-4eb3-83a9-3fb0417d76a5" 315 | }, 316 | "outputs": [], 317 | "execution_count": 28 318 | }, 319 | { 320 | "cell_type": "markdown", 321 | "source": [ 322 | "## Native Scoring" 323 | ], 324 | "metadata": { 325 | "azdata_cell_guid": "6797adcc-3460-43a2-a32b-5e83203d7926" 326 | } 327 | }, 328 | { 329 | "cell_type": "code", 330 | "source": [ 331 | "DECLARE @model VARBINARY(MAX) = (SELECT TOP(1) model FROM [dbo].[MachineLearningModels] WHERE model_name = 'manual_transmission_model' AND model_version = 2);\n", 332 | "SELECT d.*, p.* \n", 333 | "FROM PREDICT(MODEL = @model, DATA = [dbo].[MotorTrendCarRoadTests] AS d) WITH (am_Pred float) AS p;" 334 | ], 335 | "metadata": { 336 | "azdata_cell_guid": "fae5e513-f856-4dee-bd04-8485689d31c6" 337 | }, 338 | "outputs": [], 339 | "execution_count": 29 340 | } 341 | ] 342 | } -------------------------------------------------------------------------------- /deploy-r-models/TestDeployedRModels/TestDeployedRModels/carsService/CarsServiceExtensions.cs: -------------------------------------------------------------------------------- 1 | // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 2 | // Changes may cause incorrect behavior and will be lost if the code is 3 | // regenerated. 
4 | 5 | namespace TestDeployedRModels 6 | { 7 | using System; 8 | using System.Collections; 9 | using System.Collections.Generic; 10 | using System.Threading; 11 | using System.Threading.Tasks; 12 | using Microsoft.Rest; 13 | using Models; 14 | 15 | /// 16 | /// Extension methods for CarsService. 17 | /// 18 | public static partial class CarsServiceExtensions 19 | { 20 | /// 21 | /// Logs the user in 22 | /// 23 | /// 24 | /// The operations group for this extension method. 25 | /// 26 | /// 27 | /// 28 | public static AccessTokenResponse Login(this ICarsService operations, LoginRequest loginRequest) 29 | { 30 | return Task.Factory.StartNew(s => ((ICarsService)s).LoginAsync(loginRequest), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 31 | } 32 | 33 | /// 34 | /// Logs the user in 35 | /// 36 | /// 37 | /// The operations group for this extension method. 38 | /// 39 | /// 40 | /// 41 | /// 42 | /// The cancellation token. 43 | /// 44 | public static async Task LoginAsync(this ICarsService operations, LoginRequest loginRequest, CancellationToken cancellationToken = default(CancellationToken)) 45 | { 46 | using (var _result = await operations.LoginWithHttpMessagesAsync(loginRequest, null, cancellationToken).ConfigureAwait(false)) 47 | { 48 | return _result.Body; 49 | } 50 | } 51 | 52 | /// 53 | /// The user renews access token and refresh token 54 | /// 55 | /// 56 | /// The operations group for this extension method. 
57 | /// 58 | /// 59 | /// 60 | public static AccessTokenResponse RenewToken(this ICarsService operations, RenewTokenRequest renewTokenRequest) 61 | { 62 | return Task.Factory.StartNew(s => ((ICarsService)s).RenewTokenAsync(renewTokenRequest), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 63 | } 64 | 65 | /// 66 | /// The user renews access token and refresh token 67 | /// 68 | /// 69 | /// The operations group for this extension method. 70 | /// 71 | /// 72 | /// 73 | /// 74 | /// The cancellation token. 75 | /// 76 | public static async Task RenewTokenAsync(this ICarsService operations, RenewTokenRequest renewTokenRequest, CancellationToken cancellationToken = default(CancellationToken)) 77 | { 78 | using (var _result = await operations.RenewTokenWithHttpMessagesAsync(renewTokenRequest, null, cancellationToken).ConfigureAwait(false)) 79 | { 80 | return _result.Body; 81 | } 82 | } 83 | 84 | /// 85 | /// The user revokes a refresh token 86 | /// 87 | /// 88 | /// The operations group for this extension method. 89 | /// 90 | /// 91 | /// The refresh token to be revoked 92 | /// 93 | public static AccessTokenResponse RevokeRefreshToken(this ICarsService operations, string refreshToken) 94 | { 95 | return Task.Factory.StartNew(s => ((ICarsService)s).RevokeRefreshTokenAsync(refreshToken), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 96 | } 97 | 98 | /// 99 | /// The user revokes a refresh token 100 | /// 101 | /// 102 | /// The operations group for this extension method. 103 | /// 104 | /// 105 | /// The refresh token to be revoked 106 | /// 107 | /// 108 | /// The cancellation token. 
109 | /// 110 | public static async Task RevokeRefreshTokenAsync(this ICarsService operations, string refreshToken, CancellationToken cancellationToken = default(CancellationToken)) 111 | { 112 | using (var _result = await operations.RevokeRefreshTokenWithHttpMessagesAsync(refreshToken, null, cancellationToken).ConfigureAwait(false)) 113 | { 114 | return _result.Body; 115 | } 116 | } 117 | 118 | /// 119 | /// Consume the carsService web service. 120 | /// 121 | /// 122 | /// The operations group for this extension method. 123 | /// 124 | /// 125 | /// Input parameters to the web service. 126 | /// 127 | public static WebServiceResult ManualTransmission(this ICarsService operations, InputParameters webServiceParameters) 128 | { 129 | return Task.Factory.StartNew(s => ((ICarsService)s).ManualTransmissionAsync(webServiceParameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 130 | } 131 | 132 | /// 133 | /// Consume the carsService web service. 134 | /// 135 | /// 136 | /// The operations group for this extension method. 137 | /// 138 | /// 139 | /// Input parameters to the web service. 140 | /// 141 | /// 142 | /// The cancellation token. 143 | /// 144 | public static async Task ManualTransmissionAsync(this ICarsService operations, InputParameters webServiceParameters, CancellationToken cancellationToken = default(CancellationToken)) 145 | { 146 | using (var _result = await operations.ManualTransmissionWithHttpMessagesAsync(webServiceParameters, null, cancellationToken).ConfigureAwait(false)) 147 | { 148 | return _result.Body; 149 | } 150 | } 151 | 152 | /// 153 | /// Consume the carsService web service asynchronously. 154 | /// 155 | /// 156 | /// The operations group for this extension method. 157 | /// 158 | /// 159 | /// Input parameters to the web service. 160 | /// 161 | /// 162 | /// Number of threads used to process entries in the batch. Default value is 163 | /// 10. 
Please make sure not to use too high of a number because it might 164 | /// negatively impact performance. 165 | /// 166 | public static StartBatchExecutionResponse StartBatchExecution(this ICarsService operations, IList batchWebServiceParameters, int? parallelCount = default(int?)) 167 | { 168 | return Task.Factory.StartNew(s => ((ICarsService)s).StartBatchExecutionAsync(batchWebServiceParameters, parallelCount), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 169 | } 170 | 171 | /// 172 | /// Consume the carsService web service asynchronously. 173 | /// 174 | /// 175 | /// The operations group for this extension method. 176 | /// 177 | /// 178 | /// Input parameters to the web service. 179 | /// 180 | /// 181 | /// Number of threads used to process entries in the batch. Default value is 182 | /// 10. Please make sure not to use too high of a number because it might 183 | /// negatively impact performance. 184 | /// 185 | /// 186 | /// The cancellation token. 187 | /// 188 | public static async Task StartBatchExecutionAsync(this ICarsService operations, IList batchWebServiceParameters, int? parallelCount = default(int?), CancellationToken cancellationToken = default(CancellationToken)) 189 | { 190 | using (var _result = await operations.StartBatchExecutionWithHttpMessagesAsync(batchWebServiceParameters, parallelCount, null, cancellationToken).ConfigureAwait(false)) 191 | { 192 | return _result.Body; 193 | } 194 | } 195 | 196 | /// 197 | /// Gets all batch executions for carsService. 198 | /// 199 | /// 200 | /// The operations group for this extension method. 
201 | /// 202 | public static IList GetBatchExecutions(this ICarsService operations) 203 | { 204 | return Task.Factory.StartNew(s => ((ICarsService)s).GetBatchExecutionsAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 205 | } 206 | 207 | /// 208 | /// Gets all batch executions for carsService. 209 | /// 210 | /// 211 | /// The operations group for this extension method. 212 | /// 213 | /// 214 | /// The cancellation token. 215 | /// 216 | public static async Task> GetBatchExecutionsAsync(this ICarsService operations, CancellationToken cancellationToken = default(CancellationToken)) 217 | { 218 | using (var _result = await operations.GetBatchExecutionsWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) 219 | { 220 | return _result.Body; 221 | } 222 | } 223 | 224 | /// 225 | /// Gets all batch executions for carsService. 226 | /// 227 | /// 228 | /// The operations group for this extension method. 229 | /// 230 | /// 231 | /// Execution id of the execution 232 | /// 233 | /// 234 | /// Returns the already processed results of the batch execution even if it 235 | /// hasn't been fully completed. 236 | /// 237 | public static BatchWebServiceResult GetBatchExecutionStatus(this ICarsService operations, string executionId, bool? showPartialResults = default(bool?)) 238 | { 239 | return Task.Factory.StartNew(s => ((ICarsService)s).GetBatchExecutionStatusAsync(executionId, showPartialResults), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 240 | } 241 | 242 | /// 243 | /// Gets all batch executions for carsService. 244 | /// 245 | /// 246 | /// The operations group for this extension method. 247 | /// 248 | /// 249 | /// Execution id of the execution 250 | /// 251 | /// 252 | /// Returns the already processed results of the batch execution even if it 253 | /// hasn't been fully completed. 
254 | /// 255 | /// 256 | /// The cancellation token. 257 | /// 258 | public static async Task GetBatchExecutionStatusAsync(this ICarsService operations, string executionId, bool? showPartialResults = default(bool?), CancellationToken cancellationToken = default(CancellationToken)) 259 | { 260 | using (var _result = await operations.GetBatchExecutionStatusWithHttpMessagesAsync(executionId, showPartialResults, null, cancellationToken).ConfigureAwait(false)) 261 | { 262 | return _result.Body; 263 | } 264 | } 265 | 266 | /// 267 | /// Cancels and deletes all batch executions for carsService. 268 | /// 269 | /// 270 | /// The operations group for this extension method. 271 | /// 272 | /// 273 | /// Execution id of the execution. 274 | /// 275 | public static IList CancelAndDeleteBatchExecution(this ICarsService operations, string executionId) 276 | { 277 | return Task.Factory.StartNew(s => ((ICarsService)s).CancelAndDeleteBatchExecutionAsync(executionId), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 278 | } 279 | 280 | /// 281 | /// Cancels and deletes all batch executions for carsService. 282 | /// 283 | /// 284 | /// The operations group for this extension method. 285 | /// 286 | /// 287 | /// Execution id of the execution. 288 | /// 289 | /// 290 | /// The cancellation token. 291 | /// 292 | public static async Task> CancelAndDeleteBatchExecutionAsync(this ICarsService operations, string executionId, CancellationToken cancellationToken = default(CancellationToken)) 293 | { 294 | using (var _result = await operations.CancelAndDeleteBatchExecutionWithHttpMessagesAsync(executionId, null, cancellationToken).ConfigureAwait(false)) 295 | { 296 | return _result.Body; 297 | } 298 | } 299 | 300 | /// 301 | /// Gets all files from an individual execution in carsService. 302 | /// 303 | /// 304 | /// The operations group for this extension method. 
305 | /// 306 | /// 307 | /// Execution id of the execution 308 | /// 309 | /// 310 | /// Index of the execution in the batch. 311 | /// 312 | public static IList GetBatchExecutionFiles(this ICarsService operations, string executionId, int index) 313 | { 314 | return Task.Factory.StartNew(s => ((ICarsService)s).GetBatchExecutionFilesAsync(executionId, index), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 315 | } 316 | 317 | /// 318 | /// Gets all files from an individual execution in carsService. 319 | /// 320 | /// 321 | /// The operations group for this extension method. 322 | /// 323 | /// 324 | /// Execution id of the execution 325 | /// 326 | /// 327 | /// Index of the execution in the batch. 328 | /// 329 | /// 330 | /// The cancellation token. 331 | /// 332 | public static async Task> GetBatchExecutionFilesAsync(this ICarsService operations, string executionId, int index, CancellationToken cancellationToken = default(CancellationToken)) 333 | { 334 | using (var _result = await operations.GetBatchExecutionFilesWithHttpMessagesAsync(executionId, index, null, cancellationToken).ConfigureAwait(false)) 335 | { 336 | return _result.Body; 337 | } 338 | } 339 | 340 | /// 341 | /// Gets a specific file from an execution in carsService. 342 | /// 343 | /// 344 | /// The operations group for this extension method. 345 | /// 346 | /// 347 | /// Execution id of the execution 348 | /// 349 | /// 350 | /// Index of the execution in the batch. 351 | /// 352 | /// 353 | /// Name of the file to be returned. 
354 | /// 355 | public static System.IO.Stream GetBatchExecutionFile(this ICarsService operations, string executionId, int index, string fileName) 356 | { 357 | return Task.Factory.StartNew(s => ((ICarsService)s).GetBatchExecutionFileAsync(executionId, index, fileName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); 358 | } 359 | 360 | /// 361 | /// Gets a specific file from an execution in carsService. 362 | /// 363 | /// 364 | /// The operations group for this extension method. 365 | /// 366 | /// 367 | /// Execution id of the execution 368 | /// 369 | /// 370 | /// Index of the execution in the batch. 371 | /// 372 | /// 373 | /// Name of the file to be returned. 374 | /// 375 | /// 376 | /// The cancellation token. 377 | /// 378 | public static async Task GetBatchExecutionFileAsync(this ICarsService operations, string executionId, int index, string fileName, CancellationToken cancellationToken = default(CancellationToken)) 379 | { 380 | var _result = await operations.GetBatchExecutionFileWithHttpMessagesAsync(executionId, index, fileName, null, cancellationToken).ConfigureAwait(false); 381 | _result.Request.Dispose(); 382 | return _result.Body; 383 | } 384 | 385 | } 386 | } 387 | --------------------------------------------------------------------------------