├── azure_function_app ├── host.json ├── azure09blob.png ├── azure07nameFunc.png ├── azure08integrate.png ├── azure11copypaste.png ├── azure12copyhtml.png ├── azure03NameFuncApp.png ├── azure04GoToFuncApp.png ├── azure05CreateFuncs.png ├── azure06Webhookfunc.png ├── azure01CreateFuncApp.png ├── azure02CreateFuncApp2.png ├── azure10installExtension.png ├── azure13changeendpoints.png ├── showTestPage │ ├── function.json │ ├── run.csx │ └── testpage.html ├── extensions.csproj ├── getnexttask_v0 │ ├── function.json │ └── run.csx ├── predict_v0 │ ├── function.json │ └── run.csx ├── getresult_v0 │ ├── function.json │ └── run.csx └── puttaskresult_v0 │ ├── function.json │ └── run.csx ├── python ├── requirements.txt ├── mask.png └── deepLab.py ├── architecture.png ├── csharp └── AzureImageModelDeployExample │ ├── AzureImageModelDeployExample │ ├── App.config │ ├── bin │ │ └── Debug │ │ │ └── AzureImageModelDeployExample.exe │ ├── Properties │ │ ├── Settings.settings │ │ ├── Settings.Designer.cs │ │ ├── AssemblyInfo.cs │ │ ├── Resources.Designer.cs │ │ └── Resources.resx │ ├── Program.cs │ ├── AzureImageModelDeployExample.csproj │ ├── Form1.resx │ ├── Form1.cs │ └── Form1.Designer.cs │ └── AzureImageModelDeployExample.sln ├── .gitignore ├── LICENSE └── README.md /azure_function_app/host.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0" 3 | } -------------------------------------------------------------------------------- /python/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | pillow 3 | opencv 4 | keras 5 | matplotlib 6 | imageio -------------------------------------------------------------------------------- /architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/architecture.png 
-------------------------------------------------------------------------------- /python/mask.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/python/mask.png -------------------------------------------------------------------------------- /azure_function_app/azure09blob.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure09blob.png -------------------------------------------------------------------------------- /azure_function_app/azure07nameFunc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure07nameFunc.png -------------------------------------------------------------------------------- /azure_function_app/azure08integrate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure08integrate.png -------------------------------------------------------------------------------- /azure_function_app/azure11copypaste.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure11copypaste.png -------------------------------------------------------------------------------- /azure_function_app/azure12copyhtml.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure12copyhtml.png -------------------------------------------------------------------------------- /azure_function_app/azure03NameFuncApp.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure03NameFuncApp.png -------------------------------------------------------------------------------- /azure_function_app/azure04GoToFuncApp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure04GoToFuncApp.png -------------------------------------------------------------------------------- /azure_function_app/azure05CreateFuncs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure05CreateFuncs.png -------------------------------------------------------------------------------- /azure_function_app/azure06Webhookfunc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure06Webhookfunc.png -------------------------------------------------------------------------------- /azure_function_app/azure01CreateFuncApp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure01CreateFuncApp.png -------------------------------------------------------------------------------- /azure_function_app/azure02CreateFuncApp2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure02CreateFuncApp2.png -------------------------------------------------------------------------------- /azure_function_app/azure10installExtension.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure10installExtension.png 
-------------------------------------------------------------------------------- /azure_function_app/azure13changeendpoints.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/azure_function_app/azure13changeendpoints.png -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/App.config: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/bin/Debug/AzureImageModelDeployExample.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/douglas125/AzureMLDeploy/HEAD/csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/bin/Debug/AzureImageModelDeployExample.exe -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/obj/ 2 | csharp/AzureImageModelDeployExample/.vs/ 3 | csharp/AzureImageModelDeployExample/packages/ 4 | csharp/AzureImageModelDeployExample/Properties/ 5 | .ipynb_checkpoints/ 6 | __pycache__/ 7 | *.pdb 8 | *.xml 9 | *.h5 10 | 11 | *.exe.config -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Properties/Settings.settings: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /azure_function_app/showTestPage/function.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "authLevel": "function", 5 | "name": "req", 6 | "type": "httpTrigger", 7 | "direction": "in", 8 | "methods": [ 9 | "get", 10 | "post" 11 | ] 12 | }, 13 | { 14 | "name": "$return", 15 | "type": "http", 16 | "direction": "out" 17 | } 18 | ], 19 | "disabled": false 20 | } -------------------------------------------------------------------------------- /azure_function_app/extensions.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | netstandard2.0 4 | 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /azure_function_app/getnexttask_v0/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "authLevel": "function", 5 | "name": "req", 6 | "type": "httpTrigger", 7 | "direction": "in", 8 | "methods": [ 9 | "get", 10 | "post" 11 | ] 12 | }, 13 | { 14 | "name": "$return", 15 | "type": "http", 16 | "direction": "out" 17 | }, 18 | { 19 | "type": "blob", 20 | "name": "outputBlob", 21 | "path": "inboxv0/{rand-guid}", 22 | "connection": "AzureWebJobsStorage", 23 | "direction": "out" 24 | } 25 | ] 26 | } -------------------------------------------------------------------------------- /azure_function_app/predict_v0/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "authLevel": "function", 5 | "name": "req", 6 | "type": "httpTrigger", 7 | "direction": "in", 8 | "methods": [ 9 | "get", 10 | "post" 11 | ] 12 | }, 13 | { 14 | "name": "$return", 15 | "type": "http", 16 | "direction": "out" 17 | }, 18 | { 19 | "type": "blob", 20 | "name": "outputBlob", 21 | "path": "inboxv0/{rand-guid}.payload", 22 | "connection": "AzureWebJobsStorage", 23 | "direction": "out" 24 | } 25 | ] 26 | } 
-------------------------------------------------------------------------------- /azure_function_app/getresult_v0/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "authLevel": "function", 5 | "name": "req", 6 | "type": "httpTrigger", 7 | "direction": "in", 8 | "methods": [ 9 | "get", 10 | "post" 11 | ] 12 | }, 13 | { 14 | "name": "$return", 15 | "type": "http", 16 | "direction": "out" 17 | }, 18 | { 19 | "type": "blob", 20 | "name": "outputBlob", 21 | "path": "outboxv0/{rand-guid}.result", 22 | "connection": "AzureWebJobsStorage", 23 | "direction": "out" 24 | } 25 | ] 26 | } -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Program.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Linq; 4 | using System.Threading.Tasks; 5 | using System.Windows.Forms; 6 | 7 | namespace AzureImageModelDeployExample 8 | { 9 | static class Program 10 | { 11 | /// 12 | /// The main entry point for the application. 
13 | /// 14 | [STAThread] 15 | static void Main() 16 | { 17 | Application.EnableVisualStyles(); 18 | Application.SetCompatibleTextRenderingDefault(false); 19 | Application.Run(new Form1()); 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /azure_function_app/puttaskresult_v0/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "authLevel": "function", 5 | "name": "req", 6 | "type": "httpTrigger", 7 | "direction": "in", 8 | "methods": [ 9 | "get", 10 | "post" 11 | ] 12 | }, 13 | { 14 | "name": "$return", 15 | "type": "http", 16 | "direction": "out" 17 | }, 18 | { 19 | "type": "blob", 20 | "name": "outputBlob", 21 | "path": "inboxv0/{rand-guid}", 22 | "connection": "AzureWebJobsStorage", 23 | "direction": "out" 24 | }, 25 | { 26 | "type": "blob", 27 | "name": "resultBlob", 28 | "path": "outboxv0/{rand-guid}", 29 | "connection": "AzureWebJobsStorage", 30 | "direction": "out" 31 | } 32 | ] 33 | } -------------------------------------------------------------------------------- /azure_function_app/showTestPage/run.csx: -------------------------------------------------------------------------------- 1 | #r "Newtonsoft.Json" 2 | 3 | using System.Net; 4 | using Microsoft.AspNetCore.Mvc; 5 | using Microsoft.Extensions.Primitives; 6 | using Newtonsoft.Json; 7 | 8 | using System.Net; 9 | using System.Net.Http.Headers; 10 | 11 | public static async Task Run(HttpRequest req, ILogger log) 12 | { 13 | log.LogInformation("C# HTTP trigger function processed a request."); 14 | 15 | string name = req.Query["name"]; 16 | 17 | string requestBody = await new StreamReader(req.Body).ReadToEndAsync(); 18 | dynamic data = JsonConvert.DeserializeObject(requestBody); 19 | name = name ?? 
data?.name; 20 | 21 | string page = System.IO.File.ReadAllText(@"d:\home\site\wwwroot\showTestPage\testpage.html"); 22 | MemoryStream ms = new MemoryStream(System.Text.Encoding.Default.GetBytes(page)); 23 | 24 | HttpResponseMessage a = new HttpResponseMessage(HttpStatusCode.OK); 25 | a.Content = new StreamContent(ms); 26 | a.Content.Headers.ContentType = new MediaTypeHeaderValue("text/html"); 27 | 28 | return a; 29 | } 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Douglas Coimbra de Andrade 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Properties/Settings.Designer.cs: -------------------------------------------------------------------------------- 1 | //------------------------------------------------------------------------------ 2 | // 3 | // This code was generated by a tool. 4 | // Runtime Version:4.0.30319.42000 5 | // 6 | // Changes to this file may cause incorrect behavior and will be lost if 7 | // the code is regenerated. 8 | // 9 | //------------------------------------------------------------------------------ 10 | 11 | namespace AzureImageModelDeployExample.Properties 12 | { 13 | 14 | 15 | [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] 16 | [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")] 17 | internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase 18 | { 19 | 20 | private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings()))); 21 | 22 | public static Settings Default 23 | { 24 | get 25 | { 26 | return defaultInstance; 27 | } 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | VisualStudioVersion = 15.0.28010.2046 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AzureImageModelDeployExample", "AzureImageModelDeployExample\AzureImageModelDeployExample.csproj", "{B632A376-7768-45A8-B52C-6B65D1C5A68F}" 7 | EndProject 8 | Global 9 | 
GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {B632A376-7768-45A8-B52C-6B65D1C5A68F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {B632A376-7768-45A8-B52C-6B65D1C5A68F}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {B632A376-7768-45A8-B52C-6B65D1C5A68F}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {B632A376-7768-45A8-B52C-6B65D1C5A68F}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {CC608A0E-EA9F-48FD-BFA4-C7FC564C52B1} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /azure_function_app/getnexttask_v0/run.csx: -------------------------------------------------------------------------------- 1 | #r "Newtonsoft.Json" 2 | #r "Microsoft.WindowsAzure.Storage" 3 | 4 | using Microsoft.WindowsAzure.Storage.Blob; 5 | 6 | using System.Net; 7 | using Microsoft.AspNetCore.Mvc; 8 | using Microsoft.Extensions.Primitives; 9 | using Newtonsoft.Json; 10 | 11 | public static async Task Run(HttpRequest req, ILogger log, CloudBlockBlob outputBlob) 12 | { 13 | log.LogInformation("C# HTTP trigger function processed a request."); 14 | 15 | string name = req.Query["name"]; 16 | //string task_id = req.Query["task_id"]; 17 | 18 | string requestBody = await new StreamReader(req.Body).ReadToEndAsync(); 19 | dynamic data = JsonConvert.DeserializeObject(requestBody); 20 | name = name ?? data?.name; 21 | //task_id = task_id ?? 
data?.task_id; 22 | 23 | //if (task_id==null) return new BadRequestObjectResult("Please pass a task_id on the query string or in the request body"); 24 | 25 | //fetch directory 26 | CloudBlobDirectory d = outputBlob.Parent; 27 | 28 | //fetch next blob 29 | var lstblobs = await d.ListBlobsSegmentedAsync(null); 30 | CloudBlockBlob bb = (CloudBlockBlob)lstblobs.Results.FirstOrDefault(); 31 | 32 | //download contents 33 | byte[] payload_bytes = new byte[bb.Properties.Length]; 34 | await bb.DownloadToByteArrayAsync(payload_bytes, 0); 35 | 36 | string payload = System.Convert.ToBase64String(payload_bytes); 37 | 38 | return (ActionResult)new OkObjectResult(bb.Name + "|" + payload); 39 | } 40 | -------------------------------------------------------------------------------- /azure_function_app/predict_v0/run.csx: -------------------------------------------------------------------------------- 1 | #r "Newtonsoft.Json" 2 | #r "Microsoft.WindowsAzure.Storage" 3 | 4 | using Microsoft.WindowsAzure.Storage.Blob; 5 | 6 | using System.Net; 7 | using Microsoft.AspNetCore.Mvc; 8 | using Microsoft.Extensions.Primitives; 9 | using Newtonsoft.Json; 10 | 11 | public static async Task Run(HttpRequest req, ILogger log, CloudBlockBlob outputBlob) 12 | { 13 | log.LogInformation("C# HTTP trigger function processed a request."); 14 | 15 | string name = req.Query["name"]; 16 | string payload = req.Query["payload"]; 17 | 18 | string requestBody = await new StreamReader(req.Body).ReadToEndAsync(); 19 | dynamic data = JsonConvert.DeserializeObject(requestBody); 20 | name = name ?? data?.name; 21 | payload = payload ?? 
data?.payload; 22 | 23 | if (payload == null) return new BadRequestObjectResult("Please send the payload!"); 24 | 25 | byte[] payload_bytes = System.Convert.FromBase64String(payload); 26 | await outputBlob.UploadFromByteArrayAsync(payload_bytes, 0, payload_bytes.Length); //, null); 27 | 28 | name=outputBlob.Name; 29 | 30 | return (ActionResult)new OkObjectResult(outputBlob.Name); 31 | } 32 | 33 | public static byte[] Base64Decode(string base64EncodedData) { 34 | byte[] base64EncodedBytes = System.Convert.FromBase64String(base64EncodedData); 35 | return base64EncodedBytes; 36 | } 37 | 38 | public static string Base64Encode(string plainText) { 39 | var plainTextBytes = System.Text.Encoding.UTF8.GetBytes(plainText); 40 | return System.Convert.ToBase64String(plainTextBytes); 41 | } -------------------------------------------------------------------------------- /azure_function_app/getresult_v0/run.csx: -------------------------------------------------------------------------------- 1 | #r "Newtonsoft.Json" 2 | #r "Microsoft.WindowsAzure.Storage" 3 | 4 | using Microsoft.WindowsAzure.Storage.Blob; 5 | 6 | using System.Net; 7 | using Microsoft.AspNetCore.Mvc; 8 | using Microsoft.Extensions.Primitives; 9 | using Newtonsoft.Json; 10 | 11 | public static async Task Run(HttpRequest req, ILogger log, CloudBlockBlob outputBlob) 12 | { 13 | log.LogInformation("C# HTTP trigger function processed a request."); 14 | 15 | string name = req.Query["name"]; 16 | string task_id = req.Query["task_id"]; 17 | 18 | string requestBody = await new StreamReader(req.Body).ReadToEndAsync(); 19 | dynamic data = JsonConvert.DeserializeObject(requestBody); 20 | name = name ?? data?.name; 21 | task_id = task_id ?? 
data?.task_id; 22 | 23 | if (task_id==null) return new BadRequestObjectResult("Please pass a task_id on the query string or in the request body"); 24 | 25 | //fetch directory 26 | CloudBlobDirectory d = outputBlob.Parent; 27 | //fetch blob 28 | CloudBlockBlob b = d.GetBlockBlobReference(task_id); 29 | 30 | bool exists = await b.ExistsAsync(); 31 | if (!exists) return (ActionResult)new OkObjectResult("dGFza19pZCBub3QgZm91bmQh"); 32 | 33 | await b.FetchAttributesAsync(); 34 | 35 | //download contents 36 | byte[] payload_bytes = new byte[b.Properties.Length]; 37 | await b.DownloadToByteArrayAsync(payload_bytes, 0); 38 | 39 | string payload = System.Convert.ToBase64String(payload_bytes); 40 | 41 | return (ActionResult)new OkObjectResult(payload); 42 | } 43 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Properties/AssemblyInfo.cs: -------------------------------------------------------------------------------- 1 | using System.Reflection; 2 | using System.Runtime.CompilerServices; 3 | using System.Runtime.InteropServices; 4 | 5 | // General Information about an assembly is controlled through the following 6 | // set of attributes. Change these attribute values to modify the information 7 | // associated with an assembly. 8 | [assembly: AssemblyTitle("AzureImageModelDeployExample")] 9 | [assembly: AssemblyDescription("")] 10 | [assembly: AssemblyConfiguration("")] 11 | [assembly: AssemblyCompany("")] 12 | [assembly: AssemblyProduct("AzureImageModelDeployExample")] 13 | [assembly: AssemblyCopyright("Copyright © 2018")] 14 | [assembly: AssemblyTrademark("")] 15 | [assembly: AssemblyCulture("")] 16 | 17 | // Setting ComVisible to false makes the types in this assembly not visible 18 | // to COM components. If you need to access a type in this assembly from 19 | // COM, set the ComVisible attribute to true on that type. 
20 | [assembly: ComVisible(false)] 21 | 22 | // The following GUID is for the ID of the typelib if this project is exposed to COM 23 | [assembly: Guid("b632a376-7768-45a8-b52c-6b65d1c5a68f")] 24 | 25 | // Version information for an assembly consists of the following four values: 26 | // 27 | // Major Version 28 | // Minor Version 29 | // Build Number 30 | // Revision 31 | // 32 | // You can specify all the values or you can default the Build and Revision Numbers 33 | // by using the '*' as shown below: 34 | // [assembly: AssemblyVersion("1.0.*")] 35 | [assembly: AssemblyVersion("1.0.0.0")] 36 | [assembly: AssemblyFileVersion("1.0.0.0")] 37 | -------------------------------------------------------------------------------- /azure_function_app/puttaskresult_v0/run.csx: -------------------------------------------------------------------------------- 1 | #r "Newtonsoft.Json" 2 | #r "Microsoft.WindowsAzure.Storage" 3 | 4 | using Microsoft.WindowsAzure.Storage.Blob; 5 | 6 | using System.Net; 7 | using Microsoft.AspNetCore.Mvc; 8 | using Microsoft.Extensions.Primitives; 9 | using Newtonsoft.Json; 10 | 11 | public static async Task Run(HttpRequest req, ILogger log, CloudBlockBlob outputBlob, CloudBlockBlob resultBlob) 12 | { 13 | log.LogInformation("C# HTTP trigger function processed a request."); 14 | 15 | string name = req.Query["name"]; 16 | string task_id = req.Query["task_id"]; 17 | string result = req.Query["result"]; 18 | 19 | string requestBody = await new StreamReader(req.Body).ReadToEndAsync(); 20 | dynamic data = JsonConvert.DeserializeObject(requestBody); 21 | name = name ?? data?.name; 22 | task_id = task_id ?? data?.task_id; 23 | result = result ?? 
data?.result; 24 | 25 | if (task_id==null || result==null) return new BadRequestObjectResult("Please pass a task_id and result on the query string or in the request body"); 26 | 27 | //fetch directory 28 | CloudBlobDirectory dd = resultBlob.Parent; 29 | CloudBlockBlob bb = dd.GetBlockBlobReference(task_id); 30 | 31 | byte[] result_bytes = System.Convert.FromBase64String(result); 32 | await bb.UploadFromByteArrayAsync(result_bytes, 0, result_bytes.Length); 33 | 34 | //fetch directory 35 | CloudBlobDirectory d = outputBlob.Parent; 36 | //fetch blob 37 | CloudBlockBlob b = d.GetBlockBlobReference(task_id); 38 | await b.DeleteIfExistsAsync(); 39 | 40 | return (ActionResult)new OkObjectResult(task_id); 41 | } 42 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Properties/Resources.Designer.cs: -------------------------------------------------------------------------------- 1 | //------------------------------------------------------------------------------ 2 | // 3 | // This code was generated by a tool. 4 | // Runtime Version:4.0.30319.42000 5 | // 6 | // Changes to this file may cause incorrect behavior and will be lost if 7 | // the code is regenerated. 8 | // 9 | //------------------------------------------------------------------------------ 10 | 11 | namespace AzureImageModelDeployExample.Properties 12 | { 13 | 14 | 15 | /// 16 | /// A strongly-typed resource class, for looking up localized strings, etc. 17 | /// 18 | // This class was auto-generated by the StronglyTypedResourceBuilder 19 | // class via a tool like ResGen or Visual Studio. 20 | // To add or remove a member, edit your .ResX file then rerun ResGen 21 | // with the /str option, or rebuild your VS project. 
22 | [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] 23 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] 24 | [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] 25 | internal class Resources 26 | { 27 | 28 | private static global::System.Resources.ResourceManager resourceMan; 29 | 30 | private static global::System.Globalization.CultureInfo resourceCulture; 31 | 32 | [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] 33 | internal Resources() 34 | { 35 | } 36 | 37 | /// 38 | /// Returns the cached ResourceManager instance used by this class. 39 | /// 40 | [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] 41 | internal static global::System.Resources.ResourceManager ResourceManager 42 | { 43 | get 44 | { 45 | if ((resourceMan == null)) 46 | { 47 | global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("AzureImageModelDeployExample.Properties.Resources", typeof(Resources).Assembly); 48 | resourceMan = temp; 49 | } 50 | return resourceMan; 51 | } 52 | } 53 | 54 | /// 55 | /// Overrides the current thread's CurrentUICulture property for all 56 | /// resource lookups using this strongly typed resource class. 
57 | /// 58 | [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] 59 | internal static global::System.Globalization.CultureInfo Culture 60 | { 61 | get 62 | { 63 | return resourceCulture; 64 | } 65 | set 66 | { 67 | resourceCulture = value; 68 | } 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/AzureImageModelDeployExample.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Debug 6 | AnyCPU 7 | {B632A376-7768-45A8-B52C-6B65D1C5A68F} 8 | WinExe 9 | AzureImageModelDeployExample 10 | AzureImageModelDeployExample 11 | v4.6.1 12 | 512 13 | true 14 | true 15 | 16 | 17 | AnyCPU 18 | true 19 | full 20 | false 21 | bin\Debug\ 22 | DEBUG;TRACE 23 | prompt 24 | 4 25 | 26 | 27 | AnyCPU 28 | pdbonly 29 | true 30 | bin\Release\ 31 | TRACE 32 | prompt 33 | 4 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | Form 51 | 52 | 53 | Form1.cs 54 | 55 | 56 | 57 | 58 | Form1.cs 59 | 60 | 61 | ResXFileCodeGenerator 62 | Resources.Designer.cs 63 | Designer 64 | 65 | 66 | True 67 | Resources.resx 68 | 69 | 70 | SettingsSingleFileGenerator 71 | Settings.Designer.cs 72 | 73 | 74 | True 75 | Settings.settings 76 | True 77 | 78 | 79 | 80 | 81 | 82 | 83 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AzureMLDeploy 2 | 3 | A very simple way to deploy any machine learning model using Azure Functions 4 | --- 5 | 6 | Source code for an Azure Function App that makes it very simple and easy to deploy any machine learning model (Keras, TensorFlow, PyTorch, XGBoost, LightGBM, scikit-learn, etc) and consume it using any client application via REST APIs. 
7 | 8 | --- 9 | 10 | **Have you ever wondered how to deploy a machine learning model as a REST API so that people can use it in apps, websites and general production settings?** 11 | 12 | It turns out that this can be complicated: you might need to set up a server with containers, export models and do a lot of other tasks. Well, not anymore. This repository contains code for a simple Azure Function App template that can be used to receive requests from clients, send them to a machine learning server (MLServer) and store the results after they are computed. 13 | 14 | The base code for Azure Functions can be found in azure_function_app folder. The simplest way to get started is to create a new function app and create the 4 endpoints needed (2 for client: send task and receive results, 2 for MLServer: retrieve task and send results). Copy and paste each function code and create the Integration with Azure Blob Storage. 15 | 16 | Note: the machine learning model will NOT necessarily run in the Azure Function App. It will run in a machine learning server of your choice, which could be local, deployed to a VM or to a container. 17 | 18 | # What is in this repository? 19 | 20 | - Source code for the Azure Function App that handles client and MLServer data; 21 | - Source code for C# client that sends images for semantic segmentation; 22 | - Source code for python MLServer that uses DeepLabv3+ to predict segmentation masks and sends results to Funcion App server. 
23 | 24 | # Sample application: Image Segmentation Using DeepLabv3+ 25 | 26 | As a sample application, we show how to serve the amazing DeepLabv3+ segmentation model (https://github.com/bonlime/keras-deeplab-v3-plus) using a C# client application and a python MLServer (running locally or using Google Colab): 27 | 28 | ![alt text](architecture.png "Client-Server-MLServer Architecture") 29 | 30 | Using this architecture, it is possible to enable serving the segmentation model to a client side application (C# desktop in this case, but it could be anything - HTML, mobile app, desktop - anything that can POST to a REST endpoint). The MLServer keeps listening for new tasks - when one is found, it downloads the tasks, computes predictions and sends results back to the server. 31 | 32 | Just change the endpoint addresses to the ones in your Function App. 33 | 34 | Note that this setup is invisible to the client application -- it needs only to send query data to the REST API, save the task_id token and use the task_id to retrieve results. 35 | 36 | # Creating the Azure Function App 37 | 38 | To create and deploy the code to Azure Functions, follow these steps: 39 | 40 | ## Create the Function App 41 | 42 | ![alt text](azure_function_app/azure01CreateFuncApp.png "Create the function app") 43 | 44 | ![alt text](azure_function_app/azure02CreateFuncApp2.png "Create the function app") 45 | 46 | ![alt text](azure_function_app/azure03NameFuncApp.png "Give a name to the app") 47 | 48 | ![alt text](azure_function_app/azure04GoToFuncApp.png "After it is deployed, go to resource") 49 | 50 | ## Create the endpoints 51 | 52 | At this point, it is possible to manually install Azure Blob Extension and upload the code using Microsoft Azure Storage Explorer. 
The simple (although a little longer) way is shown below: 53 | 54 | ![alt text](azure_function_app/azure05CreateFuncs.png "Create the endpoints - choose to edit online") 55 | 56 | ![alt text](azure_function_app/azure06Webhookfunc.png "Create functions as webhook") 57 | 58 | ![alt text](azure_function_app/azure07nameFunc.png "Give names to each endpoint") 59 | 60 | ## Add Blob Storage integration 61 | 62 | ![alt text](azure_function_app/azure08integrate.png "Give names to each endpoint") 63 | 64 | We could simply copy-paste the run.csx and function.json of each function, but we still need to install the Azure Blob Extension and the easiest way to do it is to try to create the blob integration: 65 | 66 | ![alt text](azure_function_app/azure09blob.png "Add blob integration") 67 | 68 | ![alt text](azure_function_app/azure10installExtension.png "Install extension") 69 | 70 | ## Copy and paste the code 71 | 72 | Now that the Blob extension is ready, all we have to do is copy-paste the run.csx and function.json of each function. 73 | 74 | ![alt text](azure_function_app/azure11copypaste.png "Install extension") 75 | 76 | ## Test the Function App 77 | 78 | We can use a simple Ajax page to check if everything is working properly by creating the showTestPage function that returns an HTML page. 
79 | 80 | Copy and paste the code for run.csx, create and copy the code to testpage.html (make sure to change the endpoint names to the ones of your own function - they can be accessed at the top of the code using link ` Get function URL`): 81 | 82 | ![alt text](azure_function_app/azure12copyhtml.png "Copy html making sure to change endpoints to the ones of your functions") 83 | 84 | ![alt text](azure_function_app/azure13changeendpoints.png "Test endpoints") 85 | 86 | 87 | -------------------------------------------------------------------------------- /azure_function_app/showTestPage/testpage.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Simple Machine Learning Model Server with Azure Functions 7 | 8 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 33 | 38 | 39 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 |
101 |
Simple Machine Learning Model Server Example with Azure Functions
102 |
103 |
104 |

Client side

105 |

106 | 107 | 108 | 109 |
110 |
111 |
112 | 113 |
114 | 115 |

116 |

Machine Learning Server Side

117 | 118 |
119 | 120 |
121 |
122 |

123 | 124 | 125 |
126 | 127 | 128 |
129 |
130 |
131 | 132 |
133 |
Simple Machine Learning Model Server Example with Azure Functions
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 | 143 | 144 | 145 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Properties/Resources.resx: -------------------------------------------------------------------------------- 1 | 2 | 3 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | text/microsoft-resx 107 | 108 | 109 | 2.0 110 | 111 | 112 | System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 113 | 114 | 115 | System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 116 | 117 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Form1.resx: -------------------------------------------------------------------------------- 1 | 2 | 3 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | text/microsoft-resx 110 | 111 | 112 | 2.0 113 | 114 | 115 | System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 116 | 117 | 118 | System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 119 | 120 | -------------------------------------------------------------------------------- /csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Form1.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;

namespace AzureImageModelDeployExample
{
    /// <summary>
    /// Client UI for the Azure Function App model server: loads an image,
    /// POSTs it (base64-encoded PNG) to the predict endpoint, retrieves the
    /// segmentation mask by task id and overlays it on the original image.
    /// </summary>
    public partial class Form1 : Form
    {
        public Form1()
        {
            InitializeComponent();
        }

        // Image selected by the user, resized to 512x512 before upload.
        Bitmap bmp;
        // Segmentation mask returned by the server; the class index of each
        // pixel is encoded in the pixel value (blue/red channels carry it).
        Bitmap mask;

        /// <summary>Lets the user pick an image and prepares it for inference.</summary>
        private void btnLoadImage_Click(object sender, EventArgs e)
        {
            OpenFileDialog ofd = new OpenFileDialog();
            ofd.Filter = "Images|*.bmp;*.png;*.jpg";
            if (ofd.ShowDialog() == DialogResult.OK)
            {
                bmp = new Bitmap(ofd.FileName);
                bmp = ResizeImage(bmp, 512, 512); //we know that we want inference on this size

                picImg.Image = bmp;
                btnSendPredict.Enabled = true;
            }
        }

        // Single shared HttpClient (recommended: HttpClient is intended to be reused).
        private static readonly HttpClient client = new HttpClient();

        /// <summary>
        /// Sends the loaded image to the predict endpoint. The response body is
        /// the task id used later to retrieve results.
        /// </summary>
        private async void btnSendPredict_Click(object sender, EventArgs e)
        {
            lbl_taskid.Text = "";
            string imgBase64 = bmp2base64(bmp);

            // NOTE(review): hand-built, single-quoted JSON; the server-side
            // function apparently tolerates it — confirm before changing.
            string content = "{'payload': '" + imgBase64 + "'}";
            var response = await client.PostAsync(txtPredictEndpoint.Text, new StringContent(content, Encoding.UTF8, "application/json"));
            var responseString = await response.Content.ReadAsStringAsync();

            lbl_taskid.Text = responseString;

            btnGetResults.Enabled = true;
        }

        /// <summary>
        /// Polls the results endpoint for the mask of the current task id and,
        /// if available, decodes and displays it.
        /// </summary>
        private async void btnGetResults_Click(object sender, EventArgs e)
        {
            string content = "{'task_id': '" + lbl_taskid.Text + "'}";

            var response = await client.PostAsync(txtResultsendpoint.Text, new StringContent(content, Encoding.UTF8, "application/json"));
            var responseString = await response.Content.ReadAsStringAsync();

            // Sentinel returned by the function app: base64 of "task_id not found!"
            if (responseString != "dGFza19pZCBub3QgZm91bmQh")
            {
                byte[] imageData = Convert.FromBase64String(responseString);
                using (var ms = new MemoryStream(imageData))
                using (var decoded = new Bitmap(ms))
                {
                    // FIX: GDI+ requires the source stream to remain open for the
                    // lifetime of an image created from it. Copy into a standalone
                    // bitmap so the stream can be disposed safely.
                    mask = new Bitmap(decoded);
                }

                drawMask();
            }
            else MessageBox.Show("Task result not found");
        }

        /// <summary>Populates the category list and initializes the palette.</summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            foreach (string s in label_names)
            {
                chkLayers.Items.Add(s);
            }
            for (int i = 0; i < chkLayers.Items.Count; i++) chkLayers.SetItemChecked(i, true);
            resetColors();
        }

        /// <summary>
        /// Builds one display color per class; a fixed seed keeps colors stable
        /// across runs. Index 0 (background) is always black.
        /// </summary>
        void resetColors()
        {
            label_colors.Clear();
            label_colors.Add(Color.Black);
            Random rnd = new Random(10);
            for (int i = 1; i < label_names.Count; i++)
            {
                Color c = Color.FromArgb(rnd.Next(255), rnd.Next(255), rnd.Next(255));
                label_colors.Add(c);
            }
        }

        private void chkLayers_SelectedIndexChanged(object sender, EventArgs e)
        {
            drawMask();
        }

        private void chkOverlay_CheckedChanged(object sender, EventArgs e)
        {
            drawMask();
        }

        #region Deal with images
        /// Label names, same convention as the segmentation model (Pascal VOC classes)
        List<string> label_names = new List<string>() {"background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus",
            "car", "cat", "chair", "cow", "diningtable", "dog", "horse", "motorbike",
            "person", "pottedplant", "sheep", "sofa", "train", "tv" };
        // Display color per class index, filled by resetColors().
        List<Color> label_colors = new List<Color>();

        /// <summary>Encodes a bitmap as a base64 PNG string.</summary>
        string bmp2base64(Bitmap bitmap)
        {
            using (var ms = new MemoryStream())
            {
                bitmap.Save(ms, System.Drawing.Imaging.ImageFormat.Png);
                // FIX: ToArray(), not GetBuffer() — GetBuffer() exposes the whole
                // internal buffer including unused capacity past the PNG data,
                // so the base64 payload would contain trailing garbage bytes.
                return Convert.ToBase64String(ms.ToArray());
            }
        }

        /// <summary>
        /// Resize the image to the specified width and height.
        /// </summary>
        /// <param name="image">The image to resize.</param>
        /// <param name="width">The width to resize to.</param>
        /// <param name="height">The height to resize to.</param>
        /// <returns>The resized image.</returns>
        public static Bitmap ResizeImage(Image image, int width, int height)
        {
            var destRect = new Rectangle(0, 0, width, height);
            var destImage = new Bitmap(width, height);

            destImage.SetResolution(image.HorizontalResolution, image.VerticalResolution);

            using (var graphics = Graphics.FromImage(destImage))
            {
                graphics.CompositingMode = CompositingMode.SourceCopy;
                graphics.CompositingQuality = CompositingQuality.HighQuality;
                graphics.InterpolationMode = InterpolationMode.HighQualityBicubic;
                graphics.SmoothingMode = SmoothingMode.HighQuality;
                graphics.PixelOffsetMode = PixelOffsetMode.HighQuality;

                using (var wrapMode = new ImageAttributes())
                {
                    // TileFlipXY avoids edge artifacts from sampling outside the source.
                    wrapMode.SetWrapMode(WrapMode.TileFlipXY);
                    graphics.DrawImage(image, destRect, 0, 0, image.Width, image.Height, GraphicsUnit.Pixel, wrapMode);
                }
            }

            return destImage;
        }

        /// <summary>
        /// Recolors the mask with the class palette and (optionally) overlays it
        /// on the loaded image.
        /// </summary>
        void drawMask()
        {
            if (mask == null) return;

            Bitmap maskdraw = new Bitmap(mask.Width, mask.Height);
            // FIX: dispose the Graphics object (was leaked).
            using (Graphics g = Graphics.FromImage(maskdraw))
            {
                g.DrawImage(mask, 0, 0);
            }
            int[,] classes = RemakeMask(maskdraw);
            pbMask.Image = maskdraw;

            if (bmp != null && chkOverlay.Checked)
            {
                Bitmap bmpover = (Bitmap)bmp.Clone();
                DrawOverlay(bmpover, classes);

                picImg.Image = bmpover;
            }
            else picImg.Image = bmp;
        }

        /// <summary>
        /// In place, replaces each pixel's class index (read from the blue
        /// channel) with its palette color — or black if the class is unchecked.
        /// </summary>
        /// <returns>Per-pixel class indexes, indexed [x, y].</returns>
        public int[,] RemakeMask(Bitmap processedBitmap)
        {
            int W = processedBitmap.Width;
            BitmapData bitmapData = processedBitmap.LockBits(new Rectangle(0, 0, processedBitmap.Width, processedBitmap.Height), ImageLockMode.ReadWrite, processedBitmap.PixelFormat);

            int[,] vals = new int[W, processedBitmap.Height];

            int bytesPerPixel = Bitmap.GetPixelFormatSize(processedBitmap.PixelFormat) / 8;
            int byteCount = bitmapData.Stride * processedBitmap.Height;
            byte[] pixels = new byte[byteCount];
            IntPtr ptrFirstPixel = bitmapData.Scan0;
            Marshal.Copy(ptrFirstPixel, pixels, 0, pixels.Length);
            int heightInPixels = bitmapData.Height;
            int widthInBytes = bitmapData.Width * bytesPerPixel;

            int xx, yy;
            yy = 0;

            for (int y = 0; y < heightInPixels; y++)
            {
                int currentLine = y * bitmapData.Stride;
                xx = 0;
                for (int x = 0; x < widthInBytes; x = x + bytesPerPixel)
                {
                    // Class index is stored in the blue channel (first byte, BGRA order).
                    int oldBlue = pixels[currentLine + x];

                    //replace with label_colors
                    byte r = label_colors[oldBlue].R;
                    byte g = label_colors[oldBlue].G;
                    byte b = label_colors[oldBlue].B;

                    if (chkLayers.GetItemChecked(oldBlue))
                    {
                        pixels[currentLine + x] = b;
                        pixels[currentLine + x + 1] = g;
                        pixels[currentLine + x + 2] = r;
                        pixels[currentLine + x + 3] = 255;
                    }
                    else
                    {
                        pixels[currentLine + x] = 0;
                        pixels[currentLine + x + 1] = 0;
                        pixels[currentLine + x + 2] = 0;
                        pixels[currentLine + x + 3] = 255;
                    }
                    vals[xx, yy] = oldBlue;

                    xx++;
                }
                yy++;
            }

            // copy modified bytes back
            Marshal.Copy(pixels, 0, ptrFirstPixel, pixels.Length);
            processedBitmap.UnlockBits(bitmapData);

            return vals;
        }

        /// <summary>
        /// Blends the class palette colors (from <paramref name="vals"/>) into
        /// the bitmap in place; unchecked classes are shown as grayscale.
        /// </summary>
        public void DrawOverlay(Bitmap processedBitmap, int[,] vals)
        {
            int W = processedBitmap.Width;
            BitmapData bitmapData = processedBitmap.LockBits(new Rectangle(0, 0, processedBitmap.Width, processedBitmap.Height), ImageLockMode.ReadWrite, processedBitmap.PixelFormat);

            int bytesPerPixel = Bitmap.GetPixelFormatSize(processedBitmap.PixelFormat) / 8;
            int byteCount = bitmapData.Stride * processedBitmap.Height;
            byte[] pixels = new byte[byteCount];
            IntPtr ptrFirstPixel = bitmapData.Scan0;
            Marshal.Copy(ptrFirstPixel, pixels, 0, pixels.Length);
            int heightInPixels = bitmapData.Height;
            int widthInBytes = bitmapData.Width * bytesPerPixel;

            int xx, yy;
            yy = 0;

            for (int y = 0; y < heightInPixels; y++)
            {
                int currentLine = y * bitmapData.Stride;
                xx = 0;
                for (int x = 0; x < widthInBytes; x = x + bytesPerPixel)
                {
                    int oldBlue = pixels[currentLine + x];
                    int oldGreen = pixels[currentLine + x + 1];
                    int oldRed = pixels[currentLine + x + 2];

                    //merge with label_colors
                    byte r = label_colors[vals[xx, yy]].R;
                    byte g = label_colors[vals[xx, yy]].G;
                    byte b = label_colors[vals[xx, yy]].B;

                    // 50/50 blend between a grayscale version of the photo and the class color.
                    float maskIntens = 0.5f;
                    byte avg = (byte)((1 - maskIntens) * (oldBlue + oldGreen + oldRed) / 3.0f);
                    r = (byte)(maskIntens * r);
                    g = (byte)(maskIntens * g);
                    b = (byte)(maskIntens * b);

                    if (chkLayers.GetItemChecked(vals[xx, yy]))
                    {
                        pixels[currentLine + x] = (byte)(avg + b);
                        pixels[currentLine + x + 1] = (byte)(avg + g);
                        pixels[currentLine + x + 2] = (byte)(avg + r);
                        pixels[currentLine + x + 3] = 255;
                    }
                    else
                    {
                        pixels[currentLine + x] = avg;
                        pixels[currentLine + x + 1] = avg;
                        pixels[currentLine + x + 2] = avg;
                        pixels[currentLine + x + 3] = 255;
                    }

                    xx++;
                }
                yy++;
            }

            // copy modified bytes back
            Marshal.Copy(pixels, 0, ptrFirstPixel, pixels.Length);
            processedBitmap.UnlockBits(bitmapData);
        }

        #endregion

        /// <summary>Shows the class name under the cursor in the group box title.</summary>
        private void picImg_MouseMove(object sender, MouseEventArgs e)
        {
            // FIX: bounds-check the cursor position and the class index —
            // GetPixel throws outside the bitmap, and a corrupt mask pixel
            // could index past label_names.
            if (bmp != null && mask != null
                && e.X >= 0 && e.Y >= 0 && e.X < mask.Width && e.Y < mask.Height)
            {
                Color c = mask.GetPixel(e.X, e.Y);
                if (c.R < label_names.Count) gbImages.Text = label_names[c.R];
            }
        }
    }
}
/csharp/AzureImageModelDeployExample/AzureImageModelDeployExample/Form1.Designer.cs: -------------------------------------------------------------------------------- 1 | namespace AzureImageModelDeployExample 2 | { 3 | partial class Form1 4 | { 5 | /// 6 | /// Required designer variable. 7 | /// 8 | private System.ComponentModel.IContainer components = null; 9 | 10 | /// 11 | /// Clean up any resources being used. 12 | /// 13 | /// true if managed resources should be disposed; otherwise, false. 14 | protected override void Dispose(bool disposing) 15 | { 16 | if (disposing && (components != null)) 17 | { 18 | components.Dispose(); 19 | } 20 | base.Dispose(disposing); 21 | } 22 | 23 | #region Windows Form Designer generated code 24 | 25 | /// 26 | /// Required method for Designer support - do not modify 27 | /// the contents of this method with the code editor. 28 | /// 29 | private void InitializeComponent() 30 | { 31 | this.gbImgUpload = new System.Windows.Forms.GroupBox(); 32 | this.txtResultsendpoint = new System.Windows.Forms.TextBox(); 33 | this.txtPredictEndpoint = new System.Windows.Forms.TextBox(); 34 | this.btnGetResults = new System.Windows.Forms.Button(); 35 | this.btnSendPredict = new System.Windows.Forms.Button(); 36 | this.btnLoadImage = new System.Windows.Forms.Button(); 37 | this.label2 = new System.Windows.Forms.Label(); 38 | this.label1 = new System.Windows.Forms.Label(); 39 | this.gbImages = new System.Windows.Forms.GroupBox(); 40 | this.pbMask = new System.Windows.Forms.PictureBox(); 41 | this.picImg = new System.Windows.Forms.PictureBox(); 42 | this.label3 = new System.Windows.Forms.Label(); 43 | this.lbl_taskid = new System.Windows.Forms.Label(); 44 | this.groupBox1 = new System.Windows.Forms.GroupBox(); 45 | this.chkOverlay = new System.Windows.Forms.CheckBox(); 46 | this.chkLayers = new System.Windows.Forms.CheckedListBox(); 47 | this.gbImgUpload.SuspendLayout(); 48 | this.gbImages.SuspendLayout(); 49 | 
((System.ComponentModel.ISupportInitialize)(this.pbMask)).BeginInit(); 50 | ((System.ComponentModel.ISupportInitialize)(this.picImg)).BeginInit(); 51 | this.groupBox1.SuspendLayout(); 52 | this.SuspendLayout(); 53 | // 54 | // gbImgUpload 55 | // 56 | this.gbImgUpload.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) 57 | | System.Windows.Forms.AnchorStyles.Right))); 58 | this.gbImgUpload.Controls.Add(this.lbl_taskid); 59 | this.gbImgUpload.Controls.Add(this.label3); 60 | this.gbImgUpload.Controls.Add(this.txtResultsendpoint); 61 | this.gbImgUpload.Controls.Add(this.txtPredictEndpoint); 62 | this.gbImgUpload.Controls.Add(this.btnGetResults); 63 | this.gbImgUpload.Controls.Add(this.btnSendPredict); 64 | this.gbImgUpload.Controls.Add(this.btnLoadImage); 65 | this.gbImgUpload.Controls.Add(this.label2); 66 | this.gbImgUpload.Controls.Add(this.label1); 67 | this.gbImgUpload.Location = new System.Drawing.Point(12, 12); 68 | this.gbImgUpload.Name = "gbImgUpload"; 69 | this.gbImgUpload.Size = new System.Drawing.Size(1237, 135); 70 | this.gbImgUpload.TabIndex = 0; 71 | this.gbImgUpload.TabStop = false; 72 | this.gbImgUpload.Text = "Select image to upload"; 73 | // 74 | // txtResultsendpoint 75 | // 76 | this.txtResultsendpoint.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) 77 | | System.Windows.Forms.AnchorStyles.Right))); 78 | this.txtResultsendpoint.Location = new System.Drawing.Point(99, 96); 79 | this.txtResultsendpoint.Name = "txtResultsendpoint"; 80 | this.txtResultsendpoint.Size = new System.Drawing.Size(1132, 20); 81 | this.txtResultsendpoint.TabIndex = 2; 82 | this.txtResultsendpoint.Text = "https://blobstorefuncteste.azurewebsites.net/api/getresult_v0?code=i29ppJFs7cfv/p" + 83 | "QgfF4KD40YsWKbLYvgI9JnRwPqvhUG1mCUYixdwQ=="; 84 | // 85 | // txtPredictEndpoint 86 | // 87 | this.txtPredictEndpoint.Anchor = 
((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) 88 | | System.Windows.Forms.AnchorStyles.Right))); 89 | this.txtPredictEndpoint.Location = new System.Drawing.Point(99, 73); 90 | this.txtPredictEndpoint.Name = "txtPredictEndpoint"; 91 | this.txtPredictEndpoint.Size = new System.Drawing.Size(1132, 20); 92 | this.txtPredictEndpoint.TabIndex = 2; 93 | this.txtPredictEndpoint.Text = "https://blobstorefuncteste.azurewebsites.net/api/predict_v0?code=XHeGapCuyxD7N3ft" + 94 | "oOOPUTCZY0XHMC0XtdfbzjG0/GcWLFoRCYlk9w=="; 95 | // 96 | // btnGetResults 97 | // 98 | this.btnGetResults.Font = new System.Drawing.Font("Microsoft Sans Serif", 14.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); 99 | this.btnGetResults.Location = new System.Drawing.Point(353, 19); 100 | this.btnGetResults.Name = "btnGetResults"; 101 | this.btnGetResults.Size = new System.Drawing.Size(192, 43); 102 | this.btnGetResults.TabIndex = 1; 103 | this.btnGetResults.Text = "Retrieve results"; 104 | this.btnGetResults.UseVisualStyleBackColor = true; 105 | this.btnGetResults.Click += new System.EventHandler(this.btnGetResults_Click); 106 | // 107 | // btnSendPredict 108 | // 109 | this.btnSendPredict.Enabled = false; 110 | this.btnSendPredict.Font = new System.Drawing.Font("Microsoft Sans Serif", 14.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); 111 | this.btnSendPredict.Location = new System.Drawing.Point(163, 19); 112 | this.btnSendPredict.Name = "btnSendPredict"; 113 | this.btnSendPredict.Size = new System.Drawing.Size(184, 43); 114 | this.btnSendPredict.TabIndex = 1; 115 | this.btnSendPredict.Text = "Send for prediction"; 116 | this.btnSendPredict.UseVisualStyleBackColor = true; 117 | this.btnSendPredict.Click += new System.EventHandler(this.btnSendPredict_Click); 118 | // 119 | // btnLoadImage 120 | // 121 | this.btnLoadImage.Font = new 
System.Drawing.Font("Microsoft Sans Serif", 14.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); 122 | this.btnLoadImage.Location = new System.Drawing.Point(9, 19); 123 | this.btnLoadImage.Name = "btnLoadImage"; 124 | this.btnLoadImage.Size = new System.Drawing.Size(148, 43); 125 | this.btnLoadImage.TabIndex = 1; 126 | this.btnLoadImage.Text = "Load Image..."; 127 | this.btnLoadImage.UseVisualStyleBackColor = true; 128 | this.btnLoadImage.Click += new System.EventHandler(this.btnLoadImage_Click); 129 | // 130 | // label2 131 | // 132 | this.label2.AutoSize = true; 133 | this.label2.Location = new System.Drawing.Point(6, 99); 134 | this.label2.Name = "label2"; 135 | this.label2.Size = new System.Drawing.Size(89, 13); 136 | this.label2.TabIndex = 0; 137 | this.label2.Text = "Results endpoint:"; 138 | // 139 | // label1 140 | // 141 | this.label1.AutoSize = true; 142 | this.label1.Location = new System.Drawing.Point(6, 76); 143 | this.label1.Name = "label1"; 144 | this.label1.Size = new System.Drawing.Size(87, 13); 145 | this.label1.TabIndex = 0; 146 | this.label1.Text = "Predict endpoint:"; 147 | // 148 | // gbImages 149 | // 150 | this.gbImages.Controls.Add(this.pbMask); 151 | this.gbImages.Controls.Add(this.picImg); 152 | this.gbImages.Location = new System.Drawing.Point(200, 153); 153 | this.gbImages.Name = "gbImages"; 154 | this.gbImages.Size = new System.Drawing.Size(1049, 538); 155 | this.gbImages.TabIndex = 1; 156 | this.gbImages.TabStop = false; 157 | this.gbImages.Text = "Image"; 158 | // 159 | // pbMask 160 | // 161 | this.pbMask.Cursor = System.Windows.Forms.Cursors.Cross; 162 | this.pbMask.Location = new System.Drawing.Point(527, 19); 163 | this.pbMask.Name = "pbMask"; 164 | this.pbMask.Size = new System.Drawing.Size(512, 512); 165 | this.pbMask.TabIndex = 0; 166 | this.pbMask.TabStop = false; 167 | // 168 | // picImg 169 | // 170 | this.picImg.Cursor = System.Windows.Forms.Cursors.Cross; 171 | this.picImg.Location = 
new System.Drawing.Point(9, 19); 172 | this.picImg.Name = "picImg"; 173 | this.picImg.Size = new System.Drawing.Size(512, 512); 174 | this.picImg.TabIndex = 0; 175 | this.picImg.TabStop = false; 176 | this.picImg.MouseMove += new System.Windows.Forms.MouseEventHandler(this.picImg_MouseMove); 177 | // 178 | // label3 179 | // 180 | this.label3.AutoSize = true; 181 | this.label3.Location = new System.Drawing.Point(551, 35); 182 | this.label3.Name = "label3"; 183 | this.label3.Size = new System.Drawing.Size(45, 13); 184 | this.label3.TabIndex = 3; 185 | this.label3.Text = "Task id:"; 186 | // 187 | // lbl_taskid 188 | // 189 | this.lbl_taskid.AutoSize = true; 190 | this.lbl_taskid.Location = new System.Drawing.Point(602, 35); 191 | this.lbl_taskid.Name = "lbl_taskid"; 192 | this.lbl_taskid.Size = new System.Drawing.Size(242, 13); 193 | this.lbl_taskid.TabIndex = 3; 194 | this.lbl_taskid.Text = "59729da0-affb-45ec-b867-c9544f935946.payload"; 195 | // 196 | // groupBox1 197 | // 198 | this.groupBox1.Controls.Add(this.chkLayers); 199 | this.groupBox1.Controls.Add(this.chkOverlay); 200 | this.groupBox1.Location = new System.Drawing.Point(12, 154); 201 | this.groupBox1.Name = "groupBox1"; 202 | this.groupBox1.Size = new System.Drawing.Size(182, 537); 203 | this.groupBox1.TabIndex = 2; 204 | this.groupBox1.TabStop = false; 205 | this.groupBox1.Text = "Categories"; 206 | // 207 | // chkOverlay 208 | // 209 | this.chkOverlay.AutoSize = true; 210 | this.chkOverlay.Font = new System.Drawing.Font("Microsoft Sans Serif", 12F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); 211 | this.chkOverlay.Location = new System.Drawing.Point(6, 30); 212 | this.chkOverlay.Name = "chkOverlay"; 213 | this.chkOverlay.Size = new System.Drawing.Size(130, 24); 214 | this.chkOverlay.TabIndex = 0; 215 | this.chkOverlay.Text = "Overlay masks"; 216 | this.chkOverlay.UseVisualStyleBackColor = true; 217 | this.chkOverlay.CheckedChanged += new 
System.EventHandler(this.chkOverlay_CheckedChanged); 218 | // 219 | // chkLayers 220 | // 221 | this.chkLayers.CheckOnClick = true; 222 | this.chkLayers.Font = new System.Drawing.Font("Microsoft Sans Serif", 11.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); 223 | this.chkLayers.FormattingEnabled = true; 224 | this.chkLayers.Location = new System.Drawing.Point(9, 71); 225 | this.chkLayers.Name = "chkLayers"; 226 | this.chkLayers.Size = new System.Drawing.Size(167, 441); 227 | this.chkLayers.TabIndex = 1; 228 | this.chkLayers.SelectedIndexChanged += new System.EventHandler(this.chkLayers_SelectedIndexChanged); 229 | // 230 | // Form1 231 | // 232 | this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); 233 | this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; 234 | this.ClientSize = new System.Drawing.Size(1261, 703); 235 | this.Controls.Add(this.groupBox1); 236 | this.Controls.Add(this.gbImages); 237 | this.Controls.Add(this.gbImgUpload); 238 | this.Name = "Form1"; 239 | this.Text = "Azure Image Model Deploy Example"; 240 | this.Load += new System.EventHandler(this.Form1_Load); 241 | this.gbImgUpload.ResumeLayout(false); 242 | this.gbImgUpload.PerformLayout(); 243 | this.gbImages.ResumeLayout(false); 244 | ((System.ComponentModel.ISupportInitialize)(this.pbMask)).EndInit(); 245 | ((System.ComponentModel.ISupportInitialize)(this.picImg)).EndInit(); 246 | this.groupBox1.ResumeLayout(false); 247 | this.groupBox1.PerformLayout(); 248 | this.ResumeLayout(false); 249 | 250 | } 251 | 252 | #endregion 253 | 254 | private System.Windows.Forms.GroupBox gbImgUpload; 255 | private System.Windows.Forms.TextBox txtResultsendpoint; 256 | private System.Windows.Forms.TextBox txtPredictEndpoint; 257 | private System.Windows.Forms.Button btnGetResults; 258 | private System.Windows.Forms.Button btnSendPredict; 259 | private System.Windows.Forms.Button btnLoadImage; 260 | private System.Windows.Forms.Label label2; 261 | private 
System.Windows.Forms.Label label1; 262 | private System.Windows.Forms.GroupBox gbImages; 263 | private System.Windows.Forms.PictureBox picImg; 264 | private System.Windows.Forms.PictureBox pbMask; 265 | private System.Windows.Forms.Label lbl_taskid; 266 | private System.Windows.Forms.Label label3; 267 | private System.Windows.Forms.GroupBox groupBox1; 268 | private System.Windows.Forms.CheckBox chkOverlay; 269 | private System.Windows.Forms.CheckedListBox chkLayers; 270 | } 271 | } 272 | 273 | -------------------------------------------------------------------------------- /python/deepLab.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ Deeplabv3+ model for Keras. 4 | This model is based on TF repo: 5 | https://github.com/tensorflow/models/tree/master/research/deeplab 6 | On Pascal VOC, original model gets to 84.56% mIOU 7 | 8 | Now this model is only available for the TensorFlow backend, 9 | due to its reliance on `SeparableConvolution` layers, but Theano will add 10 | this layer soon. 
11 | 12 | MobileNetv2 backbone is based on this repo: 13 | https://github.com/JonathanCMitchell/mobilenet_v2_keras 14 | 15 | # Reference 16 | - [Encoder-Decoder with Atrous Separable Convolution 17 | for Semantic Image Segmentation](https://arxiv.org/pdf/1802.02611.pdf) 18 | - [Xception: Deep Learning with Depthwise Separable Convolutions] 19 | (https://arxiv.org/abs/1610.02357) 20 | - [Inverted Residuals and Linear Bottlenecks: Mobile Networks for 21 | Classification, Detection and Segmentation](https://arxiv.org/abs/1801.04381) 22 | """ 23 | 24 | from __future__ import absolute_import 25 | from __future__ import division 26 | from __future__ import print_function 27 | 28 | import numpy as np 29 | 30 | from keras.models import Model 31 | from keras import layers 32 | from keras.layers import Input 33 | from keras.layers import Activation 34 | from keras.layers import Concatenate 35 | from keras.layers import Add 36 | from keras.layers import Dropout 37 | from keras.layers import BatchNormalization 38 | from keras.layers import Conv2D 39 | from keras.layers import DepthwiseConv2D 40 | from keras.layers import ZeroPadding2D 41 | from keras.layers import AveragePooling2D 42 | from keras.engine import Layer 43 | from keras.engine import InputSpec 44 | from keras.engine.topology import get_source_inputs 45 | from keras import backend as K 46 | from keras.applications import imagenet_utils 47 | from keras.utils import conv_utils 48 | from keras.utils.data_utils import get_file 49 | 50 | WEIGHTS_PATH_X = "https://github.com/bonlime/keras-deeplab-v3-plus/releases/download/1.1/deeplabv3_xception_tf_dim_ordering_tf_kernels.h5" 51 | WEIGHTS_PATH_MOBILE = "https://github.com/bonlime/keras-deeplab-v3-plus/releases/download/1.1/deeplabv3_mobilenetv2_tf_dim_ordering_tf_kernels.h5" 52 | 53 | 54 | class BilinearUpsampling(Layer): 55 | """Just a simple bilinear upsampling layer. Works only with TF. 56 | Args: 57 | upsampling: tuple of 2 numbers > 0. 
The upsampling ratio for h and w 58 | output_size: used instead of upsampling arg if passed! 59 | """ 60 | 61 | def __init__(self, upsampling=(2, 2), output_size=None, data_format=None, **kwargs): 62 | 63 | super(BilinearUpsampling, self).__init__(**kwargs) 64 | 65 | self.data_format = K.normalize_data_format(data_format) 66 | self.input_spec = InputSpec(ndim=4) 67 | if output_size: 68 | self.output_size = conv_utils.normalize_tuple( 69 | output_size, 2, 'output_size') 70 | self.upsampling = None 71 | else: 72 | self.output_size = None 73 | self.upsampling = conv_utils.normalize_tuple( 74 | upsampling, 2, 'upsampling') 75 | 76 | def compute_output_shape(self, input_shape): 77 | if self.upsampling: 78 | height = self.upsampling[0] * \ 79 | input_shape[1] if input_shape[1] is not None else None 80 | width = self.upsampling[1] * \ 81 | input_shape[2] if input_shape[2] is not None else None 82 | else: 83 | height = self.output_size[0] 84 | width = self.output_size[1] 85 | return (input_shape[0], 86 | height, 87 | width, 88 | input_shape[3]) 89 | 90 | def call(self, inputs): 91 | if self.upsampling: 92 | return K.tf.image.resize_bilinear(inputs, (inputs.shape[1] * self.upsampling[0], 93 | inputs.shape[2] * self.upsampling[1]), 94 | align_corners=True) 95 | else: 96 | return K.tf.image.resize_bilinear(inputs, (self.output_size[0], 97 | self.output_size[1]), 98 | align_corners=True) 99 | 100 | def get_config(self): 101 | config = {'upsampling': self.upsampling, 102 | 'output_size': self.output_size, 103 | 'data_format': self.data_format} 104 | base_config = super(BilinearUpsampling, self).get_config() 105 | return dict(list(base_config.items()) + list(config.items())) 106 | 107 | 108 | def SepConv_BN(x, filters, prefix, stride=1, kernel_size=3, rate=1, depth_activation=False, epsilon=1e-3): 109 | """ SepConv with BN between depthwise & pointwise. 
Optionally add activation after BN 110 | Implements right "same" padding for even kernel sizes 111 | Args: 112 | x: input tensor 113 | filters: num of filters in pointwise convolution 114 | prefix: prefix before name 115 | stride: stride at depthwise conv 116 | kernel_size: kernel size for depthwise convolution 117 | rate: atrous rate for depthwise convolution 118 | depth_activation: flag to use activation between depthwise & poinwise convs 119 | epsilon: epsilon to use in BN layer 120 | """ 121 | 122 | if stride == 1: 123 | depth_padding = 'same' 124 | else: 125 | kernel_size_effective = kernel_size + (kernel_size - 1) * (rate - 1) 126 | pad_total = kernel_size_effective - 1 127 | pad_beg = pad_total // 2 128 | pad_end = pad_total - pad_beg 129 | x = ZeroPadding2D((pad_beg, pad_end))(x) 130 | depth_padding = 'valid' 131 | 132 | if not depth_activation: 133 | x = Activation('relu')(x) 134 | x = DepthwiseConv2D((kernel_size, kernel_size), strides=(stride, stride), dilation_rate=(rate, rate), 135 | padding=depth_padding, use_bias=False, name=prefix + '_depthwise')(x) 136 | x = BatchNormalization(name=prefix + '_depthwise_BN', epsilon=epsilon)(x) 137 | if depth_activation: 138 | x = Activation('relu')(x) 139 | x = Conv2D(filters, (1, 1), padding='same', 140 | use_bias=False, name=prefix + '_pointwise')(x) 141 | x = BatchNormalization(name=prefix + '_pointwise_BN', epsilon=epsilon)(x) 142 | if depth_activation: 143 | x = Activation('relu')(x) 144 | 145 | return x 146 | 147 | 148 | def _conv2d_same(x, filters, prefix, stride=1, kernel_size=3, rate=1): 149 | """Implements right 'same' padding for even kernel sizes 150 | Without this there is a 1 pixel drift when stride = 2 151 | Args: 152 | x: input tensor 153 | filters: num of filters in pointwise convolution 154 | prefix: prefix before name 155 | stride: stride at depthwise conv 156 | kernel_size: kernel size for depthwise convolution 157 | rate: atrous rate for depthwise convolution 158 | """ 159 | if stride == 1: 160 
def _xception_block(inputs, depth_list, prefix, skip_connection_type, stride,
                    rate=1, depth_activation=False, return_skip=False):
    """Basic building block of the modified Xception backbone.

    Args:
        inputs: input tensor
        depth_list: number of filters in each SepConv layer. len(depth_list) == 3
        prefix: prefix before name
        skip_connection_type: one of {'conv', 'sum', 'none'}
        stride: stride at last depthwise conv
        rate: atrous rate for depthwise convolution
        depth_activation: flag to use activation between depthwise & pointwise convs
        return_skip: flag to return additional tensor after 2 SepConvs for decoder
    """
    residual = inputs
    skip = None
    for unit in range(3):
        residual = SepConv_BN(residual,
                              depth_list[unit],
                              prefix + '_separable_conv{}'.format(unit + 1),
                              stride=stride if unit == 2 else 1,
                              rate=rate,
                              depth_activation=depth_activation)
        # The output of the second SepConv feeds the decoder skip path.
        if unit == 1:
            skip = residual

    if skip_connection_type == 'conv':
        # Project the block input so channel counts match before the add.
        shortcut = _conv2d_same(inputs, depth_list[-1], prefix + '_shortcut',
                                kernel_size=1,
                                stride=stride)
        shortcut = BatchNormalization(name=prefix + '_shortcut_BN')(shortcut)
        outputs = layers.add([residual, shortcut])
    elif skip_connection_type == 'sum':
        outputs = layers.add([residual, inputs])
    elif skip_connection_type == 'none':
        outputs = residual

    return (outputs, skip) if return_skip else outputs
skip_connection_type == 'none': 212 | outputs = residual 213 | if return_skip: 214 | return outputs, skip 215 | else: 216 | return outputs 217 | 218 | 219 | def relu6(x): 220 | return K.relu(x, max_value=6) 221 | 222 | 223 | def _make_divisible(v, divisor, min_value=None): 224 | if min_value is None: 225 | min_value = divisor 226 | new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) 227 | # Make sure that round down does not go down by more than 10%. 228 | if new_v < 0.9 * v: 229 | new_v += divisor 230 | return new_v 231 | 232 | 233 | def _inverted_res_block(inputs, expansion, stride, alpha, filters, block_id, skip_connection, rate=1): 234 | in_channels = inputs._keras_shape[-1] 235 | pointwise_conv_filters = int(filters * alpha) 236 | pointwise_filters = _make_divisible(pointwise_conv_filters, 8) 237 | x = inputs 238 | prefix = 'expanded_conv_{}_'.format(block_id) 239 | if block_id: 240 | # Expand 241 | 242 | x = Conv2D(expansion * in_channels, kernel_size=1, padding='same', 243 | use_bias=False, activation=None, 244 | name=prefix + 'expand')(x) 245 | x = BatchNormalization(epsilon=1e-3, momentum=0.999, 246 | name=prefix + 'expand_BN')(x) 247 | x = Activation(relu6, name=prefix + 'expand_relu')(x) 248 | else: 249 | prefix = 'expanded_conv_' 250 | # Depthwise 251 | x = DepthwiseConv2D(kernel_size=3, strides=stride, activation=None, 252 | use_bias=False, padding='same', dilation_rate=(rate, rate), 253 | name=prefix + 'depthwise')(x) 254 | x = BatchNormalization(epsilon=1e-3, momentum=0.999, 255 | name=prefix + 'depthwise_BN')(x) 256 | 257 | x = Activation(relu6, name=prefix + 'depthwise_relu')(x) 258 | 259 | # Project 260 | x = Conv2D(pointwise_filters, 261 | kernel_size=1, padding='same', use_bias=False, activation=None, 262 | name=prefix + 'project')(x) 263 | x = BatchNormalization(epsilon=1e-3, momentum=0.999, 264 | name=prefix + 'project_BN')(x) 265 | 266 | if skip_connection: 267 | return Add(name=prefix + 'add')([inputs, x]) 268 | 269 | # if 
def _inverted_res_block(inputs, expansion, stride, alpha, filters, block_id, skip_connection, rate=1):
    """MobileNetV2 inverted residual block: expand (1x1) -> depthwise -> project (1x1).

    Args:
        inputs: input tensor
        expansion: channel expansion factor of the 1x1 expand convolution
        stride: stride of the depthwise convolution
        alpha: width multiplier applied to `filters`
        filters: base number of output filters (before alpha / divisibility)
        block_id: index used in layer names; block 0 has no expand step
        skip_connection: if True, add `inputs` to the block output
        rate: dilation rate of the depthwise convolution
    """
    # FIX: use the public K.int_shape() instead of the Keras-private
    # `_keras_shape` attribute, which is absent on newer Keras versions.
    in_channels = K.int_shape(inputs)[-1]
    pointwise_conv_filters = int(filters * alpha)
    pointwise_filters = _make_divisible(pointwise_conv_filters, 8)
    x = inputs
    prefix = 'expanded_conv_{}_'.format(block_id)
    if block_id:
        # Expand
        x = Conv2D(expansion * in_channels, kernel_size=1, padding='same',
                   use_bias=False, activation=None,
                   name=prefix + 'expand')(x)
        x = BatchNormalization(epsilon=1e-3, momentum=0.999,
                               name=prefix + 'expand_BN')(x)
        x = Activation(relu6, name=prefix + 'expand_relu')(x)
    else:
        prefix = 'expanded_conv_'

    # Depthwise
    x = DepthwiseConv2D(kernel_size=3, strides=stride, activation=None,
                        use_bias=False, padding='same', dilation_rate=(rate, rate),
                        name=prefix + 'depthwise')(x)
    x = BatchNormalization(epsilon=1e-3, momentum=0.999,
                           name=prefix + 'depthwise_BN')(x)
    x = Activation(relu6, name=prefix + 'depthwise_relu')(x)

    # Project
    x = Conv2D(pointwise_filters,
               kernel_size=1, padding='same', use_bias=False, activation=None,
               name=prefix + 'project')(x)
    x = BatchNormalization(epsilon=1e-3, momentum=0.999,
                           name=prefix + 'project_BN')(x)

    if skip_connection:
        return Add(name=prefix + 'add')([inputs, x])
    return x


def Deeplabv3(weights='pascal_voc', input_tensor=None, input_shape=(512, 512, 3), classes=21, backbone='mobilenetv2', OS=16, alpha=1.):
    """Instantiates the Deeplabv3+ architecture.

    Optionally loads weights pre-trained on PASCAL VOC. This model is
    available for TensorFlow only, and can only be used with inputs
    following the TensorFlow data format `(width, height, channels)`.

    # Arguments
        weights: one of 'pascal_voc' (pre-trained on pascal voc)
            or None (random initialization)
        input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
            to use as image input for the model.
        input_shape: shape of input image. format HxWxC
            PASCAL VOC model was trained on (512,512,3) images
        classes: number of desired classes. If classes != 21,
            last layer is initialized randomly
        backbone: backbone to use. one of {'xception', 'mobilenetv2'}
        OS: determines input_shape/feature_extractor_output ratio. One of {8,16}.
            Used only for xception backbone.
        alpha: controls the width of the MobileNetV2 network. This is known as the
            width multiplier in the MobileNetV2 paper.
            - If `alpha` < 1.0, proportionally decreases the number
                of filters in each layer.
            - If `alpha` > 1.0, proportionally increases the number
                of filters in each layer.
            - If `alpha` = 1, default number of filters from the paper
                are used at each layer.
            Used only for mobilenetv2 backbone.

    # Returns
        A Keras model instance.

    # Raises
        RuntimeError: If attempting to run this model with a
            backend that does not support separable convolutions.
        ValueError: in case of invalid argument for `weights` or `backbone`
    """
    if weights not in {'pascal_voc', None}:
        raise ValueError('The `weights` argument should be either '
                         '`None` (random initialization) or `pascal_voc` '
                         '(pre-trained on PASCAL VOC)')

    if K.backend() != 'tensorflow':
        raise RuntimeError('The Deeplabv3+ model is only available with '
                           'the TensorFlow backend.')

    if backbone not in {'xception', 'mobilenetv2'}:
        raise ValueError('The `backbone` argument should be either '
                         '`xception` or `mobilenetv2` ')

    if input_tensor is None:
        img_input = Input(shape=input_shape)
    else:
        if not K.is_keras_tensor(input_tensor):
            img_input = Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor

    if backbone == 'xception':
        if OS == 8:
            entry_block3_stride = 1
            middle_block_rate = 2  # ! Not mentioned in paper, but required
            exit_block_rates = (2, 4)
            atrous_rates = (12, 24, 36)
        else:
            entry_block3_stride = 2
            middle_block_rate = 1
            exit_block_rates = (1, 2)
            atrous_rates = (6, 12, 18)

        x = Conv2D(32, (3, 3), strides=(2, 2),
                   name='entry_flow_conv1_1', use_bias=False, padding='same')(img_input)
        x = BatchNormalization(name='entry_flow_conv1_1_BN')(x)
        x = Activation('relu')(x)

        x = _conv2d_same(x, 64, 'entry_flow_conv1_2', kernel_size=3, stride=1)
        x = BatchNormalization(name='entry_flow_conv1_2_BN')(x)
        x = Activation('relu')(x)

        x = _xception_block(x, [128, 128, 128], 'entry_flow_block1',
                            skip_connection_type='conv', stride=2,
                            depth_activation=False)
        # skip1 is reused by the decoder below.
        x, skip1 = _xception_block(x, [256, 256, 256], 'entry_flow_block2',
                                   skip_connection_type='conv', stride=2,
                                   depth_activation=False, return_skip=True)

        x = _xception_block(x, [728, 728, 728], 'entry_flow_block3',
                            skip_connection_type='conv', stride=entry_block3_stride,
                            depth_activation=False)
        for i in range(16):
            x = _xception_block(x, [728, 728, 728], 'middle_flow_unit_{}'.format(i + 1),
                                skip_connection_type='sum', stride=1, rate=middle_block_rate,
                                depth_activation=False)

        x = _xception_block(x, [728, 1024, 1024], 'exit_flow_block1',
                            skip_connection_type='conv', stride=1, rate=exit_block_rates[0],
                            depth_activation=False)
        x = _xception_block(x, [1536, 1536, 2048], 'exit_flow_block2',
                            skip_connection_type='none', stride=1, rate=exit_block_rates[1],
                            depth_activation=True)

    else:
        OS = 8
        first_block_filters = _make_divisible(32 * alpha, 8)
        x = Conv2D(first_block_filters,
                   kernel_size=3,
                   strides=(2, 2), padding='same',
                   use_bias=False, name='Conv')(img_input)
        x = BatchNormalization(
            epsilon=1e-3, momentum=0.999, name='Conv_BN')(x)
        x = Activation(relu6, name='Conv_Relu6')(x)

        x = _inverted_res_block(x, filters=16, alpha=alpha, stride=1,
                                expansion=1, block_id=0, skip_connection=False)

        x = _inverted_res_block(x, filters=24, alpha=alpha, stride=2,
                                expansion=6, block_id=1, skip_connection=False)
        x = _inverted_res_block(x, filters=24, alpha=alpha, stride=1,
                                expansion=6, block_id=2, skip_connection=True)

        x = _inverted_res_block(x, filters=32, alpha=alpha, stride=2,
                                expansion=6, block_id=3, skip_connection=False)
        x = _inverted_res_block(x, filters=32, alpha=alpha, stride=1,
                                expansion=6, block_id=4, skip_connection=True)
        x = _inverted_res_block(x, filters=32, alpha=alpha, stride=1,
                                expansion=6, block_id=5, skip_connection=True)

        # stride in block 6 changed from 2 -> 1, so we need to use rate = 2
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=1,  # 1!
                                expansion=6, block_id=6, skip_connection=False)
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=1, rate=2,
                                expansion=6, block_id=7, skip_connection=True)
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=1, rate=2,
                                expansion=6, block_id=8, skip_connection=True)
        x = _inverted_res_block(x, filters=64, alpha=alpha, stride=1, rate=2,
                                expansion=6, block_id=9, skip_connection=True)

        x = _inverted_res_block(x, filters=96, alpha=alpha, stride=1, rate=2,
                                expansion=6, block_id=10, skip_connection=False)
        x = _inverted_res_block(x, filters=96, alpha=alpha, stride=1, rate=2,
                                expansion=6, block_id=11, skip_connection=True)
        x = _inverted_res_block(x, filters=96, alpha=alpha, stride=1, rate=2,
                                expansion=6, block_id=12, skip_connection=True)

        x = _inverted_res_block(x, filters=160, alpha=alpha, stride=1, rate=2,  # 1!
                                expansion=6, block_id=13, skip_connection=False)
        x = _inverted_res_block(x, filters=160, alpha=alpha, stride=1, rate=4,
                                expansion=6, block_id=14, skip_connection=True)
        x = _inverted_res_block(x, filters=160, alpha=alpha, stride=1, rate=4,
                                expansion=6, block_id=15, skip_connection=True)

        x = _inverted_res_block(x, filters=320, alpha=alpha, stride=1, rate=4,
                                expansion=6, block_id=16, skip_connection=False)

    # end of feature extractor

    # branching for Atrous Spatial Pyramid Pooling

    # Image Feature branch
    b4 = AveragePooling2D(pool_size=(int(np.ceil(input_shape[0] / OS)),
                                     int(np.ceil(input_shape[1] / OS))))(x)
    b4 = Conv2D(256, (1, 1), padding='same',
                use_bias=False, name='image_pooling')(b4)
    b4 = BatchNormalization(name='image_pooling_BN', epsilon=1e-5)(b4)
    b4 = Activation('relu')(b4)
    b4 = BilinearUpsampling((int(np.ceil(input_shape[0] / OS)),
                             int(np.ceil(input_shape[1] / OS))))(b4)

    # simple 1x1
    b0 = Conv2D(256, (1, 1), padding='same', use_bias=False, name='aspp0')(x)
    b0 = BatchNormalization(name='aspp0_BN', epsilon=1e-5)(b0)
    b0 = Activation('relu', name='aspp0_activation')(b0)

    # there are only 2 branches in mobilenetV2. not sure why
    if backbone == 'xception':
        # rate = 6 (12)
        b1 = SepConv_BN(x, 256, 'aspp1',
                        rate=atrous_rates[0], depth_activation=True, epsilon=1e-5)
        # rate = 12 (24)
        b2 = SepConv_BN(x, 256, 'aspp2',
                        rate=atrous_rates[1], depth_activation=True, epsilon=1e-5)
        # rate = 18 (36)
        b3 = SepConv_BN(x, 256, 'aspp3',
                        rate=atrous_rates[2], depth_activation=True, epsilon=1e-5)

        # concatenate ASPP branches & project
        x = Concatenate()([b4, b0, b1, b2, b3])
    else:
        x = Concatenate()([b4, b0])

    x = Conv2D(256, (1, 1), padding='same',
               use_bias=False, name='concat_projection')(x)
    x = BatchNormalization(name='concat_projection_BN', epsilon=1e-5)(x)
    x = Activation('relu')(x)
    x = Dropout(0.1)(x)

    # DeepLab v.3+ decoder
    if backbone == 'xception':
        # Feature projection
        # x4 (x2) block
        x = BilinearUpsampling(output_size=(int(np.ceil(input_shape[0] / 4)),
                                            int(np.ceil(input_shape[1] / 4))))(x)
        dec_skip1 = Conv2D(48, (1, 1), padding='same',
                           use_bias=False, name='feature_projection0')(skip1)
        dec_skip1 = BatchNormalization(
            name='feature_projection0_BN', epsilon=1e-5)(dec_skip1)
        dec_skip1 = Activation('relu')(dec_skip1)
        x = Concatenate()([x, dec_skip1])
        x = SepConv_BN(x, 256, 'decoder_conv0',
                       depth_activation=True, epsilon=1e-5)
        x = SepConv_BN(x, 256, 'decoder_conv1',
                       depth_activation=True, epsilon=1e-5)

    # you can use it with arbitrary number of classes
    if classes == 21:
        last_layer_name = 'logits_semantic'
    else:
        last_layer_name = 'custom_logits_semantic'

    x = Conv2D(classes, (1, 1), padding='same', name=last_layer_name)(x)
    x = BilinearUpsampling(output_size=(input_shape[0], input_shape[1]))(x)

    # Ensure that the model takes into account
    # any potential predecessors of `input_tensor`.
    if input_tensor is not None:
        inputs = get_source_inputs(input_tensor)
    else:
        inputs = img_input

    model = Model(inputs, x, name='deeplabv3plus')

    # load weights
    if weights == 'pascal_voc':
        if backbone == 'xception':
            weights_path = get_file('deeplabv3_xception_tf_dim_ordering_tf_kernels.h5',
                                    WEIGHTS_PATH_X,
                                    cache_subdir='models')
        else:
            weights_path = get_file('deeplabv3_mobilenetv2_tf_dim_ordering_tf_kernels.h5',
                                    WEIGHTS_PATH_MOBILE,
                                    cache_subdir='models')
        model.load_weights(weights_path, by_name=True)
    return model
def preprocess_input(x):
    """Preprocesses a numpy array encoding a batch of images.

    # Arguments
        x: a 4D numpy array consists of RGB values within [0, 255].
    # Returns
        Input array scaled to [-1., 1.]
    """
    # 'tf' mode scales pixels from [0, 255] to [-1, 1].
    return imagenet_utils.preprocess_input(x, mode='tf')