├── .github
│   ├── CODE_OF_CONDUCT.md
│   ├── ISSUE_TEMPLATE.md
│   └── PULL_REQUEST_TEMPLATE.md
├── 1_Connectivity
│   ├── 01_Install_Features.ps1
│   ├── 02_Create_VirtualSwitch.ps1
│   ├── 03_Install_EFLOW.ps1
│   ├── 04_Copy_OPCConfig.ps1
│   ├── BasicQueries.kql
│   ├── LineSimulationDemo.json
│   ├── README.md
│   ├── SetupDataExplorer.kql
│   ├── moduleContent.json
│   └── opcconfig.json
├── 2_OperationalVisibility
│   ├── LineSimulationDemo-2.json
│   ├── README.md
│   ├── TimeSeriesQueries.kql
│   ├── anomaly-alert-workflow.json
│   ├── iiot-operational-visibility-dashboard.json
│   └── opcconfig.json
├── 3_OEECalculationEngine
│   ├── .gitignore
│   ├── README.md
│   ├── notebook
│   │   ├── CalculateOEE.ipynb
│   │   └── debugOEE.ipynb
│   ├── package
│   │   ├── PACKAGEDETAILS.md
│   │   ├── dist
│   │   │   └── manufacturingmetrics-0.1.0-py3-none-any.whl
│   │   ├── manufacturingmetrics.egg-info
│   │   │   ├── PKG-INFO
│   │   │   ├── SOURCES.txt
│   │   │   ├── dependency_links.txt
│   │   │   ├── not-zip-safe
│   │   │   └── top_level.txt
│   │   ├── manufacturingmetrics
│   │   │   ├── LICENSE
│   │   │   ├── __init__.py
│   │   │   └── oee.py
│   │   ├── requirements.txt
│   │   ├── sample_env
│   │   ├── setup.py
│   │   └── test.py
│   ├── powerbi
│   │   ├── oee.pbix
│   │   └── samplequery.sql
│   ├── sqldb
│   │   └── mes-reporting.sql
│   └── synapse
│       ├── adxLinkedService.json
│       └── sqlLinkedService.json
├── 4_FactorySupplyChainTwin
│   └── README.md
├── 5_ExplorationDataAnalysis
│   ├── README.md
│   ├── SimulatedIndustrialSensors
│   │   ├── .devcontainer
│   │   │   ├── Dockerfile
│   │   │   ├── devcontainer.json
│   │   │   └── library-scripts
│   │   │       └── docker-in-docker-debian.sh
│   │   ├── .gitignore
│   │   ├── .vscode
│   │   │   └── launch.json
│   │   ├── deployment.debug.template.json
│   │   ├── deployment.template.json
│   │   └── modules
│   │       └── SimulatedManufacturingSensors
│   │           ├── .gitignore
│   │           ├── Dockerfile.amd64
│   │           ├── Dockerfile.amd64.debug
│   │           ├── Dockerfile.arm32v7
│   │           ├── Dockerfile.arm32v7.debug
│   │           ├── Dockerfile.arm64v8
│   │           ├── Dockerfile.arm64v8.debug
│   │           ├── main.py
│   │           ├── module.json
│   │           ├── requirements.txt
│   │           └── simulateddata.csv
│   ├── VerifySimulatedData.kql
│   ├── batch-quality-data.csv
│   ├── moduleContent.json
│   └── notebooks
│       ├── 1_create_raw_dataset.ipynb
│       ├── 2_exploratory_analysis_feature_selection.ipynb
│       ├── 2_frequency_analysis.ipynb
│       ├── 3_baseline_modeling-automl.ipynb
│       └── 3_baseline_modeling.ipynb
├── 6_MachineLearningForIIoT
│   ├── OperationalizeBatch.md
│   ├── OperationalizeEdge.md
│   ├── OperationalizeNearRealTime.md
│   ├── README.md
│   ├── ml-pipelines
│   │   ├── 01-build-retrain-pipeline.ipynb
│   │   ├── 02-build-prediction-pipeline.ipynb
│   │   ├── build-datasets.py
│   │   ├── build-model.py
│   │   └── predict.py
│   ├── powerbi
│   │   └── PredictionResults.pbix
│   └── synapse-pipelines
│       ├── QualityPrediction_Pipeline_Template.zip
│       └── Read Quality Prediction CSV.ipynb
├── 7_ImageRecognitionForIIoT
│   └── README.md
├── 8_DeepReinforcementLearningForIIoT
│   └── README.md
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE
├── LICENSE.md
├── README.md
├── images
│   ├── adx-dashboard-1.png
│   ├── adx-dashboard-2.png
│   ├── adx-dashboard-3.png
│   ├── adx-dashboard-4.png
│   ├── adx-dashboard-5.png
│   ├── adx-dashboard-6.png
│   ├── adx-iothub-connection.png
│   ├── adx-query1.png
│   ├── adx-query2.png
│   ├── adx-webui.png
│   ├── alert-1.png
│   ├── alert-2.png
│   ├── alert-3.png
│   ├── alert-4.png
│   ├── alert-5.png
│   ├── alert-6.png
│   ├── connectivity-sample.png
│   ├── eflow-connect.png
│   ├── eflow-opcpublisher.png
│   ├── iiot-maturity.png
│   ├── iothub-access.png
│   ├── iothub-route-1.png
│   ├── iothub-route-2.png
│   ├── iothub-route-3.png
│   ├── kepware-certs.png
│   ├── kepware-endpoints.png
│   ├── kepware-tags.png
│   ├── ma-add-feed-1.png
│   ├── ma-add-feed-2.png
│   ├── ma-add-feed-3.png
│   ├── ma-add-feed-4.png
│   ├── ma-add-feed-5.png
│   ├── ma-anomaly-1.png
│   ├── ma-anomaly-2.png
│   ├── ma-anomaly-3.png
│   ├── ma-anomaly-4.png
│   ├── ma-anomaly-5.png
│   ├── ma-db-permissions.png
│   ├── ml-baseline-1.png
│   ├── ml-datastore.png
│   ├── ml-eda-1.png
│   ├── ml-eda-2.png
│   ├── ml-eda-3.png
│   ├── ml-eda-4.png
│   ├── ml-eda-freq.png
│   ├── ml-eda.png
│   ├── ml-model-predict-1.png
│   ├── ml-model-predict-2.png
│   ├── ml-model-predict-3.png
│   ├── ml-model-train-1.png
│   ├── ml-model-train-2.png
│   ├── ml-model-train-3.png
│   ├── ml-notebookupload.png
│   ├── ml-ops-1.png
│   ├── ml-ops-batch.png
│   ├── ml-qualitydata.png
│   ├── ml-raw-dataset.png
│   ├── ml-simulatedsensors-1.png
│   ├── ml-simulatedsensors-2.png
│   ├── ml-simulatedsensors-3.png
│   ├── ml-simulatedsensors-4.png
│   ├── oee-pbi-1.png
│   ├── oee-pbi-2.png
│   ├── oee.png
│   ├── operational-visibility-sample.png
│   ├── powerbi-1.png
│   ├── powerbi-2.png
│   ├── powerbi-3.png
│   ├── sparkpool-1.png
│   ├── sparkpool-2.png
│   ├── sparkpool-3.png
│   ├── synapse-1.png
│   ├── synapse-2.png
│   ├── synapse-3.png
│   ├── synapse-4.png
│   ├── synapse-5.png
│   ├── synapse-6.png
│   ├── ts-query1.png
│   └── ts-query2.png
└── other
    ├── CertificateCeremony.md
    └── openssl_root_ca.cnf
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Microsoft Open Source Code of Conduct
2 |
3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4 |
5 | Resources:
6 |
7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
10 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
4 | > Please provide us with the following information:
5 | > ---------------------------------------------------------------
6 |
7 | ### This issue is for a: (mark with an `x`)
8 | ```
9 | - [ ] bug report -> please search issues before submitting
10 | - [ ] feature request
11 | - [ ] documentation issue or request
12 | - [ ] regression (a behavior that used to work and stopped in a new release)
13 | ```
14 |
15 | ### Minimal steps to reproduce
16 | >
17 |
18 | ### Any log messages given by the failure
19 | >
20 |
21 | ### Expected/desired behavior
22 | >
23 |
24 | ### OS and Version?
25 | > Windows 7, 8 or 10. Linux (which distribution). macOS (Yosemite? El Capitan? Sierra?)
26 |
27 | ### Versions
28 | >
29 |
30 | ### Mention any other details that might be useful
31 |
32 | > ---------------------------------------------------------------
33 | > Thanks! We'll be in touch soon.
34 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Purpose
2 |
3 | * ...
4 |
5 | ## Does this introduce a breaking change?
6 |
7 | ```
8 | [ ] Yes
9 | [ ] No
10 | ```
11 |
12 | ## Pull Request Type
13 | What kind of change does this Pull Request introduce?
14 |
15 |
16 | ```
17 | [ ] Bugfix
18 | [ ] Feature
19 | [ ] Code style update (formatting, local variables)
20 | [ ] Refactoring (no functional changes, no api changes)
21 | [ ] Documentation content changes
22 | [ ] Other... Please describe:
23 | ```
24 |
25 | ## How to Test
26 | * Get the code
27 |
28 | ```
29 | git clone [repo-address]
30 | cd [repo-name]
31 | git checkout [branch-name]
32 | npm install
33 | ```
34 |
35 | * Test the code
36 |
37 | ```
38 | ```
39 |
40 | ## What to Check
41 | Verify that the following are valid
42 | * ...
43 |
44 | ## Other Information
45 |
--------------------------------------------------------------------------------
/1_Connectivity/01_Install_Features.ps1:
--------------------------------------------------------------------------------
1 | Install-WindowsFeature -Name Hyper-V -IncludeManagementTools
2 | Enable-WindowsOptionalFeature -Online -FeatureName "VirtualMachinePlatform" -NoRestart
3 | Restart-Computer
--------------------------------------------------------------------------------
/1_Connectivity/02_Create_VirtualSwitch.ps1:
--------------------------------------------------------------------------------
1 | # ------------------ Configure Networking ---------------------- #
2 | # Create Virtual Switch
3 | New-VMSwitch -Name "Default Switch" -SwitchType Internal
4 |
5 | #Get Index
6 | $InterfaceIndex = (Get-NetAdapter -Name "vEthernet (Default Switch)").ifIndex
7 |
8 | do {
9 | Start-Sleep -Seconds 10
10 | $IPAddress = (Get-NetIPAddress -AddressFamily IPv4 -InterfaceIndex $InterfaceIndex).IPAddress
11 | } while($null -eq $IPAddress)
12 |
13 | # Configure other IPs
14 | $octets = $IPAddress -split "\."
15 | $octets[3] = 1
16 | $GatewayIP = $octets -join "."
17 | $octets[3] = 0
18 | $NatIP = $octets -join "."
19 | $octets[3] = 100
20 | $StartIP = $octets -join "."
21 | $octets[3] = 200
22 | $EndIP = $octets -join "."
23 | $InternalIPInterfaceAddressPrefix = $NatIP + "/24"
24 |
25 | # Set Gateway IP Address
26 | New-NetIPAddress -IPAddress $GatewayIP -PrefixLength 24 -InterfaceIndex $InterfaceIndex
27 |
28 | # Create Nat
29 | New-NetNat -Name "Default Switch" -InternalIPInterfaceAddressPrefix $InternalIPInterfaceAddressPrefix
30 |
31 | # Install DHCP Server
32 | Install-WindowsFeature -Name 'DHCP' -IncludeManagementTools
33 |
34 | # Add the DHCP server to the default local security groups and restart the service
35 | netsh dhcp add securitygroups
36 | Restart-Service dhcpserver
37 |
38 | # Create an active DHCP scope covering the internal switch subnet
39 | Add-DhcpServerV4Scope -Name "AzureIoTEdgeScope" -StartRange $StartIP -EndRange $EndIP -SubnetMask 255.255.255.0 -State Active
40 |
41 | # Assign the NAT and gateway IP addresses you created in the earlier section to the DHCP server, and restart the server to load the configuration.
42 | Set-DhcpServerV4OptionValue -ScopeID $NatIP -Router $GatewayIP
43 | Restart-Service dhcpserver
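44 |
45 | # Optional sanity check (assumes the DhcpServer module installed above): confirm the scope exists
46 | Get-DhcpServerv4Scope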
--------------------------------------------------------------------------------
/1_Connectivity/03_Install_EFLOW.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [string]$CONNECTION_STRING
3 | )
4 |
5 | # ------------------ EFLOW ---------------------- #
6 | # Download the IoT Edge for Linux on Windows (EFLOW) installer
7 | $msiPath = $([io.Path]::Combine($env:TEMP, 'AzureIoTEdge.msi'))
8 | $ProgressPreference = 'SilentlyContinue'
9 | Invoke-WebRequest "https://aka.ms/AzEFLOWMSI-CR-X64" -OutFile $msiPath
10 |
11 | #Install IoT Edge for Linux on Windows on your device.
12 | Start-Process -Wait msiexec -ArgumentList "/i","$msiPath","/qn"
13 |
14 | # Create the Linux virtual machine and install the IoT Edge runtime.
15 | Deploy-Eflow -acceptEula yes -acceptOptionalTelemetry no
16 |
17 | # Provision EFLOW
18 | Provision-EflowVm -provisioningType ManualConnectionString -devConnString $CONNECTION_STRING
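19 |
20 | # Optional sanity check (assumes the EFLOW PowerShell module installed by the MSI above): verify the VM deployment
21 | Verify-EflowVm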
--------------------------------------------------------------------------------
/1_Connectivity/04_Copy_OPCConfig.ps1:
--------------------------------------------------------------------------------
1 | # Copy the OPC Publisher published-nodes file (opcconfig.json) to the EFLOW VM
2 | Invoke-EflowVmCommand "mkdir -p opcconfig"
3 | Copy-EflowVmFile -fromFile "opcconfig.json" -toFile ~/opcconfig/opcconfig.json -pushFile
4 |
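5 | # Optional: list the copied file inside the VM to confirm the push succeeded (uses the same Invoke-EflowVmCommand helper as above)
6 | Invoke-EflowVmCommand "ls -l ~/opcconfig"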
--------------------------------------------------------------------------------
/1_Connectivity/BasicQueries.kql:
--------------------------------------------------------------------------------
1 | telemetry
2 | | order by SourceTimestamp desc
3 | | take 100
4 |
5 | // Plot all Sensors in all Lines
6 | // Last 1 hour with 1 minute grouping
7 | let _startTime = now()-1h;
8 | let _endTime = now();
9 | telemetry
10 | | make-series num=avg(tolong(Value)) default=0 on SourceTimestamp in range(_startTime, _endTime, 1m) by ExpandedNodeId
11 | | render timechart
12 |
13 |
14 | // Plot all Sensors in Line 1
15 | let _startTime = now()-1h;
16 | let _endTime = now();
17 | let temptags = telemetry
18 | | distinct ExpandedNodeId
19 | | where ExpandedNodeId contains ("Line1")
20 | | project ExpandedNodeId;
21 | telemetry
22 | | where ExpandedNodeId in (temptags)
23 | | project
24 | SourceTimestamp, Tag = replace_string(ExpandedNodeId,"nsu=KEPServerEX;s=Simulator.Line1.",""),
25 | yaxis = todouble(Value)
26 | | make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime,_endTime, 1m) by Tag
27 | | render timechart
28 |
29 |
30 |
31 | // Plot all Humidity Sensors
32 | // Last 30 minutes with 10 second grouping
33 | let _startTime = now()-30m;
34 | let _endTime = now();
35 | let temptags = telemetry
36 | | distinct ExpandedNodeId
37 | | where ExpandedNodeId contains ("Humidity")
38 | | project ExpandedNodeId;
39 | telemetry
40 | | where ExpandedNodeId in (temptags)
41 | | project SourceTimestamp, ExpandedNodeId, yaxis = todouble(substring(Value,0,2))
42 | | make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime, _endTime, 10s) by ExpandedNodeId
43 | | render timechart
44 |
45 |
46 | // Management Queries
47 |
48 | .show ingestion failures
49 |
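50 | // Quick ingestion-volume check (an assumed example, not part of the original sample): rows ingested per hour
51 | telemetry
52 | | summarize RowCount = count() by bin(SourceTimestamp, 1h)
53 | | render columnchart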
--------------------------------------------------------------------------------
/1_Connectivity/SetupDataExplorer.kql:
--------------------------------------------------------------------------------
1 | .create table opcua_raw(payload: dynamic)
2 |
3 | .create table opcua_raw ingestion json mapping "opcua_mapping" @'[{"column":"payload","path":"$","datatype":"dynamic"}]'
4 |
5 | .create table telemetry (DataSetWriterID: string, ExpandedNodeId: string, Value: dynamic, SourceTimestamp: datetime)
6 |
7 | .create-or-alter function ExpandOpcUaRaw() {
8 | opcua_raw
9 | | mv-expand records = payload.Messages
10 | | project
11 | DataSetWriterID = tostring(records["DataSetWriterId"]),
12 | Payload = todynamic(records["Payload"])
13 | | mv-apply Payload on (
14 | extend key = tostring(bag_keys(Payload)[0])
15 | | extend p = Payload[key]
16 | | project ExpandedNodeId = key, Value = todynamic(p.Value), SourceTimestamp = todatetime(p.SourceTimestamp)
17 | )
18 | }
19 |
20 | .alter table telemetry policy update @'[{"Source": "opcua_raw", "Query": "ExpandOpcUaRaw()", "IsEnabled": "True"}]'
21 |
22 |
23 | //------------------------------------------------------------------------------------------
24 | // Handling multiple update polices for different data schemas across devices
25 |
26 | // example
27 | // {"messageId":1,"deviceId":"Raspberry Pi Web Client","temperature":"28.32278981756135","humidity":"67.9518300363803"}
28 |
29 | // opcua_raw
30 | // | where payload.messageId != ""
31 | // | project Payload = todynamic(payload), DeviceId = payload.deviceId
32 | // | mv-apply Payload on (
33 | // extend key = tostring(bag_keys(Payload)[0])
34 | // | extend p = Payload[key]
35 | // | where key !in ('messageId','deviceId')
36 | // | project DeviceId = DeviceId, SensorNodeId = key, Value = p, SourceTimestamp = now()
37 | // )
38 |
39 | // opcua_raw
40 | // | where payload.messageId == ""
41 | // |mv-expand records = payload.Messages
42 | // |project
43 | // DeviceId = tostring(records["DataSetWriterId"]),
44 | // Payload = todynamic(records["Payload"])
45 | // | mv-apply Payload on (
46 | // extend key = tostring(bag_keys(Payload)[0])
47 | // | extend p = Payload[key]
48 | // | project SensorNodeId = key, Value = todynamic(p.Value), SourceTimestamp = todatetime(p.SourceTimestamp)
49 | // )
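50 |
51 | // Optional check: show the update policy now attached to the telemetry table
52 | .show table telemetry policy update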
--------------------------------------------------------------------------------
/1_Connectivity/moduleContent.json:
--------------------------------------------------------------------------------
1 | {
2 | "content": {
3 | "modulesContent": {
4 | "$edgeAgent": {
5 | "properties.desired": {
6 | "schemaVersion": "1.1",
7 | "runtime": {
8 | "type": "docker",
9 | "settings": {
10 | "minDockerVersion": "v1.25",
11 | "loggingOptions": "",
12 | "registryCredentials": {}
13 | }
14 | },
15 | "systemModules": {
16 | "edgeAgent": {
17 | "type": "docker",
18 | "settings": {
19 | "image": "mcr.microsoft.com/azureiotedge-agent:1.1",
20 | "createOptions": "{}"
21 | },
22 | "env": {
23 | "UpstreamProtocol": {
24 | "value": "AmqpWs"
25 | }
26 | }
27 | },
28 | "edgeHub": {
29 | "type": "docker",
30 | "status": "running",
31 | "restartPolicy": "always",
32 | "settings": {
33 | "image": "mcr.microsoft.com/azureiotedge-hub:1.1",
34 | "createOptions": "{\"HostConfig\":{\"PortBindings\":{\"5671/tcp\":[{\"HostPort\":\"5671\"}],\"8883/tcp\":[{\"HostPort\":\"8883\"}],\"443/tcp\":[{\"HostPort\":\"443\"}]}}}"
35 | },
36 | "env": {
37 | "UpstreamProtocol": {
38 | "value": "AmqpWs"
39 | }
40 | }
41 | }
42 | },
43 | "modules": {
44 | "OPCPublisher": {
45 | "version": "1.0",
46 | "type": "docker",
47 | "status": "running",
48 | "restartPolicy": "always",
49 | "settings": {
50 | "image": "mcr.microsoft.com/iotedge/opc-publisher:2.8.0",
51 | "createOptions": "{\"Hostname\":\"publisher\",\"Cmd\":[\"PkiRootPath=/opcdata/pki\",\"--lf=/opcdata/publisher.log\",\"--pf=/opcdata/opcconfig.json\",\"--aa\",\"--di=60\",\"--me=Json\",\"--mm=PubSub\"],\"HostConfig\":{\"Binds\":[\"/home/iotedge-user/opcconfig:/opcdata\"]}}"
52 | }
53 | }
54 | }
55 | }
56 | },
57 | "$edgeHub": {
58 | "properties.desired": {
59 | "schemaVersion": "1.1",
60 | "routes": {
61 | "upstream": "FROM /messages/* INTO $upstream"
62 | },
63 | "storeAndForwardConfiguration": {
64 | "timeToLiveSecs": 7200
65 | }
66 | }
67 | },
68 | "OPCPublisher": {
69 | "properties.desired": {
70 | "routes": {
71 | "upstream": "FROM /messages/* INTO $upstream"
72 | }
73 | }
74 | }
75 | }
76 | }
77 | }
--------------------------------------------------------------------------------
/1_Connectivity/opcconfig.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "EndpointUrl": "opc.tcp://10.0.0.4:49320",
4 | "UseSecurity": true,
5 | "OpcAuthenticationMode": "UsernamePassword",
6 | "OpcAuthenticationUsername": "",
7 | "OpcAuthenticationPassword": "",
8 | "OpcNodes": [
9 | {
10 | "Id": "ns=2;s=Simulator.Line1.Humidity"
11 | },
12 | {
13 | "Id": "ns=2;s=Simulator.Line1.Temperature"
14 | },
15 | {
16 | "Id": "ns=2;s=Simulator.Line1.Speed"
17 | },
18 | {
19 | "Id": "ns=2;s=Simulator.Line2.Humidity"
20 | },
21 | {
22 | "Id": "ns=2;s=Simulator.Line2.Temperature"
23 | },
24 | {
25 | "Id": "ns=2;s=Simulator.Line2.Speed"
26 | },
27 | {
28 | "Id": "ns=2;s=Simulator.Line3.Humidity"
29 | },
30 | {
31 | "Id": "ns=2;s=Simulator.Line3.Temperature"
32 | },
33 | {
34 | "Id": "ns=2;s=Simulator.Line3.Speed"
35 | },
36 | {
37 | "Id": "ns=2;s=Simulator.Line4.Humidity"
38 | },
39 | {
40 | "Id": "ns=2;s=Simulator.Line4.Temperature"
41 | },
42 | {
43 | "Id": "ns=2;s=Simulator.Line4.Speed"
44 | }
45 | ]
46 | }
47 | ]
--------------------------------------------------------------------------------
/2_OperationalVisibility/TimeSeriesQueries.kql:
--------------------------------------------------------------------------------
1 | // Plot Temperature Sensor for Line 1 with Seasonal, Trend, and Residual components.
2 | // Last 1 hour with 1 minute aggregates
3 | let _startTime = now()-1h;
4 | let _endTime = now();
5 | let stepAggregate = 1m;
6 | let temptags = telemetry
7 | | distinct ExpandedNodeId
8 | | where ExpandedNodeId contains ("Temperature")
9 | | project ExpandedNodeId;
10 | telemetry
11 | | where ExpandedNodeId in (temptags) and ExpandedNodeId contains "Line1"
12 | | project SourceTimestamp, Tag = replace_string(ExpandedNodeId,"nsu=KEPServerEX;s=Simulator.Line1",""), yaxis = toint(Value)
13 | | make-series num=avg(yaxis) default=0 on SourceTimestamp from _startTime to _endTime step stepAggregate by Tag
14 | | extend (baseline, seasonal, trend, residual) = series_decompose(num, -1, 'linefit')
15 | | project Baseline = baseline, Seasonal = seasonal, Trend = trend, Residual = residual, Average = num, SourceTimestamp
16 | | render timechart with(title='Temperature decomposition with 1 minute granularity', ysplit=panels)
17 |
18 |
19 | // Plot Anomalies for Humidity Sensor on Line 1
20 | let _startTime = now()-1h;
21 | let _endTime = now();
22 | let stepAggregate = 1m;
23 | let temptags = telemetry
24 | | distinct ExpandedNodeId
25 | | where ExpandedNodeId contains ("Humidity")
26 | | project ExpandedNodeId;
27 | telemetry
28 | | where ExpandedNodeId in (temptags) and ExpandedNodeId contains "Line1"
29 | | project SourceTimestamp, Tag = replace_string(ExpandedNodeId,"nsu=KEPServerEX;s=Simulator.Line1",""), yaxis = todouble(Value)
30 | | make-series num=avg(yaxis) default=0 on SourceTimestamp from _startTime to _endTime step stepAggregate by Tag
31 | | extend series_decompose_anomalies(num,1)
32 | | extend series_decompose_anomalies_num_ad_flag = series_multiply(10, series_decompose_anomalies_num_ad_flag)
33 | | project
34 | AverageValue = num,
35 | AnomalyFlags = series_decompose_anomalies_num_ad_flag,
36 | AnomalyDetectionScore = series_decompose_anomalies_num_ad_score,
37 | AnomalyDetectionBaseLine = series_decompose_anomalies_num_baseline,
38 | SourceTimestamp
39 | | render timechart
40 |
41 |
42 | // Plot all Tags
43 | let _startTime = now()-1h;
44 | let _endTime = now();
45 | let temptags = telemetry
46 | | distinct ExpandedNodeId
47 | | where ExpandedNodeId contains ("Line1")
48 | | project ExpandedNodeId;
49 | telemetry
50 | | where ExpandedNodeId in (temptags)
51 | | project
52 | SourceTimestamp, Tag = replace_string(ExpandedNodeId,"nsu=KEPServerEX;s=Simulator.Line1.",""),
53 | yaxis = todouble(Value)
54 | | make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime,_endTime, 1m) by Tag
55 | | render timechart
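56 |
57 | // Note: the second argument to series_decompose_anomalies is the anomaly score threshold.
58 | // Lower values (e.g. 1, as used above) flag more points as anomalous; the default is 1.5.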
--------------------------------------------------------------------------------
/2_OperationalVisibility/anomaly-alert-workflow.json:
--------------------------------------------------------------------------------
1 | {
2 | "definition": {
3 | "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
4 | "actions": {
5 | "Create_HTML_table": {
6 | "inputs": {
7 | "columns": [
8 | {
9 | "header": "metricGuid",
10 | "value": "@item()?['metricId']"
11 | },
12 | {
13 | "header": "timestamp",
14 | "value": "@item()?['startTime']"
15 | },
16 | {
17 | "header": "dimension",
18 | "value": "@item()?['rootNode']?['dimension']"
19 | },
20 | {
21 | "header": "property",
22 | "value": "@item()?['property']"
23 | }
24 | ],
25 | "format": "HTML",
26 | "from": "@body('Parse_JSON')?['value']"
27 | },
28 | "runAfter": {
29 | "Parse_JSON": [
30 | "Succeeded"
31 | ]
32 | },
33 | "type": "Table"
34 | },
35 | "HTTP": {
36 | "inputs": {
37 | "headers": {
38 | "Ocp-Apim-Subscription-Key": "",
39 | "x-api-key": ""
40 | },
41 | "method": "GET",
42 | "uri": "@{triggerBody()?['value'][0]['callBackUrl']}"
43 | },
44 | "runAfter": {},
45 | "type": "Http"
46 | },
47 | "Parse_JSON": {
48 | "inputs": {
49 | "content": "@body('HTTP')",
50 | "schema": {
51 | "properties": {
52 | "value": {
53 | "items": {
54 | "properties": {
55 | "anomalyDetectionConfigurationId": {
56 | "type": "string"
57 | },
58 | "dataFeedId": {
59 | "type": "string"
60 | },
61 | "incidentId": {
62 | "type": "string"
63 | },
64 | "lastTime": {
65 | "type": "string"
66 | },
67 | "metricId": {
68 | "type": "string"
69 | },
70 | "property": {
71 | "properties": {
72 | "expectedValueOfRootNode": {
73 | "type": "integer"
74 | },
75 | "incidentStatus": {
76 | "type": "string"
77 | },
78 | "maxSeverity": {
79 | "type": "string"
80 | },
81 | "valueOfRootNode": {
82 | "type": "number"
83 | }
84 | },
85 | "type": "object"
86 | },
87 | "rootNode": {
88 | "properties": {
89 | "dimension": {
90 | "properties": {
91 | "Capability": {
92 | "type": "string"
93 | },
94 | "Device": {
95 | "type": "string"
96 | }
97 | },
98 | "type": "object"
99 | }
100 | },
101 | "type": "object"
102 | },
103 | "startTime": {
104 | "type": "string"
105 | }
106 | },
107 | "required": [
108 | "dataFeedId",
109 | "metricId",
110 | "anomalyDetectionConfigurationId",
111 | "incidentId",
112 | "startTime",
113 | "lastTime",
114 | "rootNode",
115 | "property"
116 | ],
117 | "type": "object"
118 | },
119 | "type": "array"
120 | }
121 | },
122 | "type": "object"
123 | }
124 | },
125 | "runAfter": {
126 | "HTTP": [
127 | "Succeeded"
128 | ]
129 | },
130 | "type": "ParseJson"
131 | },
132 | "Send_an_email_(V2)": {
133 | "inputs": {
134 | "body": {
135 | "Body": "
@{body('Create_HTML_table')}
",
136 | "Subject": "Anomaly Alert",
137 | "To": ""
138 | },
139 | "host": {
140 | "connection": {
141 | "name": "@parameters('$connections')['office365']['connectionId']"
142 | }
143 | },
144 | "method": "post",
145 | "path": "/v2/Mail"
146 | },
147 | "runAfter": {
148 | "Create_HTML_table": [
149 | "Succeeded"
150 | ]
151 | },
152 | "type": "ApiConnection"
153 | }
154 | },
155 | "contentVersion": "1.0.0.0",
156 | "outputs": {},
157 | "parameters": {
158 | "$connections": {
159 | "defaultValue": {},
160 | "type": "Object"
161 | }
162 | },
163 | "triggers": {
164 | "manual": {
165 | "inputs": {
166 | "method": "POST",
167 | "schema": {
168 | "properties": {
169 | "value": {
170 | "items": {
171 | "properties": {
172 | "alertInfo": {
173 | "properties": {
174 | "alertId": {
175 | "type": "string"
176 | },
177 | "anomalyAlertingConfigurationId": {
178 | "type": "string"
179 | },
180 | "createdTime": {
181 | "type": "string"
182 | },
183 | "modifiedTime": {
184 | "type": "string"
185 | },
186 | "timestamp": {
187 | "type": "string"
188 | }
189 | },
190 | "type": "object"
191 | },
192 | "alertType": {
193 | "type": "string"
194 | },
195 | "callBackUrl": {
196 | "type": "string"
197 | },
198 | "hookId": {
199 | "type": "string"
200 | }
201 | },
202 | "required": [
203 | "hookId",
204 | "alertType",
205 | "alertInfo",
206 | "callBackUrl"
207 | ],
208 | "type": "object"
209 | },
210 | "type": "array"
211 | }
212 | },
213 | "type": "object"
214 | }
215 | },
216 | "kind": "Http",
217 | "type": "Request"
218 | }
219 | }
220 | },
221 | "parameters": {
222 | "$connections": {
223 | "value": {
224 | "office365": {
225 | "connectionId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/iiotsample/providers/Microsoft.Web/connections/office365",
226 | "connectionName": "office365",
227 | "id": "/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Web/locations/westus2/managedApis/office365"
228 | }
229 | }
230 | }
231 | }
232 | }
--------------------------------------------------------------------------------
/2_OperationalVisibility/iiot-operational-visibility-dashboard.json:
--------------------------------------------------------------------------------
1 | {"id":"5628c7f7-a684-4170-9f2c-5e742301e96a","eTag":"Ogxx96AcQoLUTEXZIgR1pg==","title":"IIoT Operational Visibility ","tiles":[{"id":"5d265571-ca35-497b-8f9f-3cda4b93e551","title":"Average Value","query":"telemetry\n| where ExpandedNodeId contains strcat(Line,'.',SensorTag)\n| project SourceTimestamp, yaxis = toint(Value)\n| make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime, _endTime, 1m)","dataSourceId":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","visualType":"line","pageId":"b34de491-da14-4ea4-a39f-7c19954f072f","layout":{"x":0,"y":8,"width":24,"height":8},"visualOptions":{"hideTileTitle":false,"multipleYAxes":{"base":{"id":"-1","label":"Average per minute","columns":[],"yAxisMaximumValue":null,"yAxisMinimumValue":null,"yAxisScale":"linear","horizontalLines":[]},"additional":[]},"hideLegend":true,"xColumnTitle":"Time","xColumn":{"type":"infer"},"yColumns":{"type":"specified","value":["num"]},"seriesColumns":{"type":"infer"},"xAxisScale":"linear","verticalLine":"","crossFilterDisabled":false,"crossFilter":[{"interaction":"dragX","property":"timeRange","parameterId":"09149602-4f92-4a7a-bda6-48b5b9aba418","disabled":false}]},"usedParamVariables":["Line","SensorTag","_endTime","_startTime"]},{"id":"8cc4cc31-5aa4-42c9-a2d3-5a88074e5711","title":"Anomalies (Last 2-hour window)","query":"let stepAggregate = 1m;\nlet temptags = telemetry\n| distinct ExpandedNodeId\n| where ExpandedNodeId contains (SensorTag)\n| project ExpandedNodeId;\ntelemetry\n| where ExpandedNodeId in (temptags) and ExpandedNodeId contains Line\n| project SourceTimestamp, Tag = replace_string(ExpandedNodeId,\"nsu=KEPServerEX;s=Simulator.Line1\",\"\"), yaxis = todouble(Value)\n| make-series num=avg(yaxis) default=0 on SourceTimestamp from (now() - 2h) to now() step stepAggregate by Tag\n| extend series_decompose_anomalies(num,1.5) // very senstive\n| extend series_decompose_anomalies_num_ad_flag = series_multiply(series_multiply(0.5,num), series_decompose_anomalies_num_ad_flag)\n| project \n AverageValue = num, \n AnomalyFlags = series_decompose_anomalies_num_ad_flag, \n AnomalyDetectionScore = series_decompose_anomalies_num_ad_score,\n AnomalyDetectionBaseLine = series_decompose_anomalies_num_baseline,\n SourceTimestamp","dataSourceId":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","visualType":"timechart","pageId":"b34de491-da14-4ea4-a39f-7c19954f072f","layout":{"x":0,"y":0,"width":24,"height":8},"visualOptions":{"hideTileTitle":false,"multipleYAxes":{"base":{"id":"-1","label":"Average per minute","columns":[],"yAxisMaximumValue":null,"yAxisMinimumValue":null,"yAxisScale":"linear","horizontalLines":[]},"additional":[]},"hideLegend":false,"xColumnTitle":"Time","xColumn":{"type":"infer"},"yColumns":{"type":"infer"},"seriesColumns":{"type":"infer"},"xAxisScale":"linear","verticalLine":"","crossFilterDisabled":false,"crossFilter":[{"interaction":"dragX","property":"timeRange","parameterId":"09149602-4f92-4a7a-bda6-48b5b9aba418","disabled":false}]},"usedParamVariables":["Line","SensorTag"]},{"id":"1622bd6b-a877-4623-95a8-9186cad0c401","title":"Average Humidity Value","query":"telemetry\n| where ExpandedNodeId contains strcat(Line,'.Humidity')\n| project SourceTimestamp, yaxis = toint(Value)\n| make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime, _endTime, 
1m)","dataSourceId":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","visualType":"timechart","pageId":"e8f25edf-fe53-4d2d-81f9-911eb913724e","layout":{"x":0,"y":0,"width":24,"height":5},"visualOptions":{"hideTileTitle":false,"multipleYAxes":{"base":{"id":"-1","label":"Average per minute","columns":[],"yAxisMaximumValue":null,"yAxisMinimumValue":null,"yAxisScale":"linear","horizontalLines":[]},"additional":[]},"hideLegend":true,"xColumnTitle":"Time","xColumn":{"type":"infer"},"yColumns":{"type":"specified","value":["num"]},"seriesColumns":{"type":"infer"},"xAxisScale":"linear","verticalLine":"","crossFilterDisabled":false,"crossFilter":[{"interaction":"dragX","property":"timeRange","parameterId":"09149602-4f92-4a7a-bda6-48b5b9aba418","disabled":false}]},"usedParamVariables":["Line","_endTime","_startTime"]},{"id":"7630d0ba-3dd1-4209-95e0-313b4de35d5e","title":"Average of all Tags","query":"telemetry\n| where ExpandedNodeId contains strcat(Line)\n| project SourceTimestamp, yaxis = toint(Value), Tag = replace_string(ExpandedNodeId,\"nsu=KEPServerEX;s=Simulator.Line1.\",\"\")\n| make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime, _endTime, 1m) by Tag","dataSourceId":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","visualType":"timechart","pageId":"e8f25edf-fe53-4d2d-81f9-911eb913724e","layout":{"x":0,"y":15,"width":24,"height":8},"visualOptions":{"hideTileTitle":false,"multipleYAxes":{"base":{"id":"-1","label":"Average per minute","columns":[],"yAxisMaximumValue":null,"yAxisMinimumValue":null,"yAxisScale":"linear","horizontalLines":[]},"additional":[]},"hideLegend":false,"xColumnTitle":"Time","xColumn":{"type":"infer"},"yColumns":{"type":"infer"},"seriesColumns":{"type":"infer"},"xAxisScale":"linear","verticalLine":"","crossFilterDisabled":false,"crossFilter":[{"interaction":"dragX","property":"timeRange","parameterId":"09149602-4f92-4a7a-bda6-48b5b9aba418","disabled":false}]},"usedParamVariables":["Line","_endTime","_startTime"]},{"id":"0c28c0f6-b39d-4fa3-b945-094b51d51e9c","title":"Average Temperature Value","query":"telemetry\n| where ExpandedNodeId contains strcat(Line,'.Temperature')\n| project SourceTimestamp, yaxis = toint(Value)\n| make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime, _endTime, 1m)","dataSourceId":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","visualType":"timechart","pageId":"e8f25edf-fe53-4d2d-81f9-911eb913724e","layout":{"x":0,"y":5,"width":24,"height":5},"visualOptions":{"hideTileTitle":false,"multipleYAxes":{"base":{"id":"-1","label":"Average per minute","columns":[],"yAxisMaximumValue":null,"yAxisMinimumValue":null,"yAxisScale":"linear","horizontalLines":[]},"additional":[]},"hideLegend":true,"xColumnTitle":"Time","xColumn":{"type":"infer"},"yColumns":{"type":"specified","value":["num"]},"seriesColumns":{"type":"infer"},"xAxisScale":"linear","verticalLine":"","crossFilterDisabled":false,"crossFilter":[{"interaction":"dragX","property":"timeRange","parameterId":"09149602-4f92-4a7a-bda6-48b5b9aba418","disabled":false}]},"usedParamVariables":["Line","_endTime","_startTime"]},{"id":"eee4ac50-596d-4c54-9d0b-1248bc9bfe6c","title":"Average Speed Value","query":"telemetry\n| where ExpandedNodeId contains strcat(Line,'.Speed')\n| project SourceTimestamp, yaxis = toint(Value)\n| make-series num=avg(yaxis) default=0 on SourceTimestamp in range(_startTime, _endTime, 
1m)","dataSourceId":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","visualType":"timechart","pageId":"e8f25edf-fe53-4d2d-81f9-911eb913724e","layout":{"x":0,"y":10,"width":24,"height":5},"visualOptions":{"hideTileTitle":false,"multipleYAxes":{"base":{"id":"-1","label":"Average per minute","columns":[],"yAxisMaximumValue":null,"yAxisMinimumValue":null,"yAxisScale":"linear","horizontalLines":[]},"additional":[]},"hideLegend":true,"xColumnTitle":"Time","xColumn":{"type":"infer"},"yColumns":{"type":"specified","value":["num"]},"seriesColumns":{"type":"infer"},"xAxisScale":"linear","verticalLine":"","crossFilterDisabled":false,"crossFilter":[{"interaction":"dragX","property":"timeRange","parameterId":"09149602-4f92-4a7a-bda6-48b5b9aba418","disabled":false}]},"usedParamVariables":["Line","_endTime","_startTime"]},{"id":"b7f31a9d-f548-4235-bc42-a7aa8ff6be8d","title":"Machine Status","query":"telemetry\n| where ExpandedNodeId contains Line and ExpandedNodeId contains \"Status\"\n| where SourceTimestamp > _startTime and SourceTimestamp < _endTime\n| order by SourceTimestamp asc\n| extend prevValue = prev(Value), prevTimestamp = next(SourceTimestamp)\n| where prevValue != todouble(Value)\n| project ExpandedNodeId, Value, SourceTimestamp\n| extend nextValue = next(Value), nextTimestamp = next(SourceTimestamp,1,now())\n| where nextValue != todouble(Value)\n| project Duration = todouble(datetime_diff(\"minute\",nextTimestamp,SourceTimestamp)), Value = tostring(Value)\n| summarize sum(Duration) by Value\n| extend Status=replace(@'1', @'Running', Value)\n| extend Status=replace(@'2', @'Starting', Status)\n| extend Status=replace(@'3', @'Idle', Status)\n| extend Status=replace(@'4', @'Stopped', Status)\n| project Status, TotalDurationInMinutes = sum_Duration","dataSourceId":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","visualType":"pie","pageId":"2d4cd52b-c433-44b9-881b-815e62f22740","layout":{"x":0,"y":0,"width":12,"height":9},"visualOptions":{"hideTileTitle":false,"hideLegend":false,"xColumn":{"type":"specified","value":"Status"},"yColumns":{"type":"specified","value":["TotalDurationInMinutes"]},"seriesColumns":{"type":"infer"},"crossFilterDisabled":false,"labelDisabled":false,"pie__label":["name","percentage"],"tooltipDisabled":false,"pie__tooltip":["name","percentage","value"],"pie__orderBy":"size","pie__kind":"donut","crossFilter":[]},"usedParamVariables":["Line","_endTime","_startTime"]}],"dataSources":[{"id":"b430e1fb-8f43-42ce-8807-e324f214e951","name":"nsnprod","clusterUri":"https://nsnadxprod.eastus.kusto.windows.net/","database":"nsnfrigeriodb","kind":"manual-kusto","scopeId":"kusto"},{"id":"6ef8ba49-eaec-41f2-843c-54f94fe8ec04","name":"IIoT Mfg Dev","clusterUri":"https://iiotmfgdev.westus2.kusto.windows.net/","database":"mfgdb","kind":"manual-kusto","scopeId":"kusto"}],"$schema":"https://dataexplorer.azure.com/static/d/schema/21/dashboard.json","autoRefresh":{"enabled":true,"defaultInterval":"30s","minInterval":"30s"},"parameters":[{"kind":"duration","id":"09149602-4f92-4a7a-bda6-48b5b9aba418","displayName":"Time 
range","beginVariableName":"_startTime","endVariableName":"_endTime","defaultValue":{"kind":"dynamic","count":1,"unit":"hours"},"showOnPages":{"kind":"all"}},{"kind":"string","id":"2eb00380-ac7a-4c82-8640-fb50237dc467","displayName":"Line","variableName":"Line","selectionType":"single","defaultValue":{"kind":"value","value":"Line1"},"dataSource":{"kind":"static","values":[{"value":"Line1"},{"value":"Line2"},{"value":"Line3"},{"value":"Line4"}]},"showOnPages":{"kind":"all"}},{"kind":"string","id":"b89f1772-6845-4dd5-a633-e3477daa81a5","displayName":"SensorTag","variableName":"SensorTag","selectionType":"single","defaultValue":{"kind":"value","value":"Humidity"},"dataSource":{"kind":"static","values":[{"value":"Humidity"},{"value":"Temperature"},{"value":"Speed"}]},"showOnPages":{"kind":"selection","pageIds":["b34de491-da14-4ea4-a39f-7c19954f072f"]}}],"pages":[{"id":"e8f25edf-fe53-4d2d-81f9-911eb913724e","name":"Line View"},{"id":"b34de491-da14-4ea4-a39f-7c19954f072f","name":"Tag View"},{"name":"Machine Status View","id":"2d4cd52b-c433-44b9-881b-815e62f22740"}],"schema_version":"21"}
--------------------------------------------------------------------------------
/2_OperationalVisibility/opcconfig.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "EndpointUrl": "opc.tcp://10.0.0.4:49320",
4 | "UseSecurity": true,
5 | "OpcAuthenticationMode": "UsernamePassword",
6 | "OpcAuthenticationUsername": "",
7 | "OpcAuthenticationPassword": "",
8 | "OpcNodes": [
9 | {
10 | "Id": "ns=2;s=Simulator.Line1.Humidity"
11 | },
12 | {
13 | "Id": "ns=2;s=Simulator.Line1.Temperature"
14 | },
15 | {
16 | "Id": "ns=2;s=Simulator.Line1.Speed"
17 | },
18 | {
19 | "Id": "ns=2;s=Simulator.Line1.Status"
20 | },
21 | {
22 | "Id": "ns=2;s=Simulator.Line2.Humidity"
23 | },
24 | {
25 | "Id": "ns=2;s=Simulator.Line2.Temperature"
26 | },
27 | {
28 | "Id": "ns=2;s=Simulator.Line2.Speed"
29 | },
30 | {
31 | "Id": "ns=2;s=Simulator.Line2.Status"
32 | },
33 | {
34 | "Id": "ns=2;s=Simulator.Line3.Humidity"
35 | },
36 | {
37 | "Id": "ns=2;s=Simulator.Line3.Temperature"
38 | },
39 | {
40 | "Id": "ns=2;s=Simulator.Line3.Speed"
41 | },
42 | {
43 | "Id": "ns=2;s=Simulator.Line3.Status"
44 | },
45 | {
46 | "Id": "ns=2;s=Simulator.Line4.Humidity"
47 | },
48 | {
49 | "Id": "ns=2;s=Simulator.Line4.Temperature"
50 | },
51 | {
52 | "Id": "ns=2;s=Simulator.Line4.Speed"
53 | },
54 | {
55 | "Id": "ns=2;s=Simulator.Line4.Status"
56 | }
57 | ]
58 | }
59 | ]
--------------------------------------------------------------------------------
/3_OEECalculationEngine/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | *.env
3 |
4 | bin
5 | obj
6 | csx
7 | .vs
8 | edge
9 | Publish
10 |
11 | *.user
12 | *.suo
13 | *.cscfg
14 | *.Cache
15 | project.lock.json
16 |
17 | /packages
18 | /TestResults
19 |
20 | /tools/NuGet.exe
21 | /App_Data
22 | /secrets
23 | /data
24 | .secrets
25 | appsettings.json
26 | local.settings.json
27 |
28 | node_modules
29 |
30 | # Local python packages
31 | .python_packages/
32 |
33 | # Python Environments
34 | .env
35 | .venv
36 | env/
37 | venv/
38 | ENV/
39 | env.bak/
40 | venv.bak/
41 |
42 | # Byte-compiled / optimized / DLL files
43 | __pycache__/
44 | *.py[cod]
45 | *$py.class
--------------------------------------------------------------------------------
/3_OEECalculationEngine/README.md:
--------------------------------------------------------------------------------
1 | # Overall Equipment Effectiveness (OEE) and KPI Calculation Engine
2 |
3 | The goal of this sample is to accelerate deployment of [Industrial IoT Transparency Patterns](https://docs.microsoft.com/en-us/azure/architecture/guide/iiot-patterns/iiot-transparency-patterns). There is no one-size-fits-all solution; there are many considerations, so please review them before moving your workload to production.
4 |
5 | ## High Level Design
6 |
7 | 
8 |
9 | ## Pre-requisites
10 |
11 | - You have [Operational Visibility](../2_OperationalVisibility/README.md) sample working.
12 |
13 | ## Setup SQL Database
14 |
15 | - Create a [Single SQL Database](https://learn.microsoft.com/en-us/azure/azure-sql/database/single-database-create-quickstart?view=azuresql&tabs=azure-cli) using the following az cli commands:
16 | 1. Create a SQL server:
17 | `az sql server create --name iiotsamplesqlserver --resource-group iiotsample --location "West US 2" --admin-user azureuser --admin-password <password>`
18 | 1. Configure the firewall:
19 | `az sql server firewall-rule create --resource-group iiotsample --server iiotsamplesqlserver -n AllowYourIp --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0`
20 | 1. Create the database
21 | `az sql db create --resource-group iiotsample --server iiotsamplesqlserver --name iiotsamplesqldb --sample-name AdventureWorksLT --edition GeneralPurpose --compute-model Serverless --family Gen5 --capacity 2`
22 |
23 | - Run the [sqldb/mes-reporting.sql](sqldb/mes-reporting.sql) script to create the MES and OEE Reporting tables, along with some sample data
24 |
25 | ## Setup Synapse Workspace
26 |
27 | - Create a [Synapse Workspace](https://docs.microsoft.com/en-us/azure/synapse-analytics/quickstart-create-workspace) with default settings:
28 | 1. Create a Data Lake Storage account
29 | `az storage account create --name iiotsamplestaccount --resource-group iiotsample --location westus2 --sku Standard_RAGRS --kind StorageV2`
30 | 1. Create a container
31 | `az storage container create --name iiotsamplefs --account-name iiotsamplestaccount --auth-mode login`
32 | 1. Create the Synapse workspace
33 | `az synapse workspace create --name iiotsamplesynapsews --resource-group iiotsample --storage-account iiotsamplestaccount --file-system iiotsamplefs --sql-admin-login-user sqladminuser --sql-admin-login-password <password> --location westus2`
34 |
35 | - Create 2 [Linked Services](https://docs.microsoft.com/en-us/azure/data-factory/concepts-linked-services?tabs=data-factory) in Synapse Workspace:
36 |
37 | 1. Download [synapse/sqlLinkedService.json](./synapse/sqlLinkedService.json) and add your SQL password
38 | 1. Download [synapse/adxLinkedService.json](./synapse/adxLinkedService.json) and add tenantId, servicePrincipalId and servicePrincipalKey related to the Azure Data Explorer created in the prerequisites
39 | 1. Link the [SQL Database](https://docs.microsoft.com/en-us/azure/data-factory/connector-azure-sql-database?tabs=data-factory#create-an-azure-sql-database-linked-service-using-ui) created above:
40 | `az synapse linked-service create --workspace-name iiotsamplesynapsews --name sqllinkedservice --file @"./sqlLinkedService.json"`
41 | 1. Link [Azure Data Explorer](https://docs.microsoft.com/en-us/azure/data-factory/connector-azure-data-explorer?tabs=data-factory#create-a-linked-service-to-azure-data-explorer-using-ui) created in the prerequisites:
42 | `az synapse linked-service create --workspace-name iiotsamplesynapsews --name adxlinkedservice --file @"./adxLinkedService.json"`
43 |
44 | - Upload the workspace package [package/dist/manufacturingmetrics-0.1.0-py3-none-any.whl](package/dist/manufacturingmetrics-0.1.0-py3-none-any.whl) in Synapse Studio under `Manage > Workspace packages`
45 |
46 |
47 |
48 | - Create a new [Apache Spark Pool](https://learn.microsoft.com/en-us/cli/azure/synapse/spark/pool?view=azure-cli-latest#az-synapse-spark-pool-create):
49 | `az synapse spark pool create --name devtestspark --workspace-name iiotsamplesynapsews --resource-group iiotsample --spark-version 2.4 --node-count 3 --node-size Small`
50 |
51 | - In the Spark pool's `Packages` settings, upload the [package/requirements.txt](package/requirements.txt) file, select the workspace package uploaded above, and click `Apply`. Wait until the packages are deployed.
52 |
53 |
54 |
55 | ## Calculate OEE using Synapse Notebook
56 |
57 | - Open the `Develop` tab, click `+`, and import the [notebook/CalculateOEE.ipynb](notebook/CalculateOEE.ipynb) notebook
58 |
59 | - Attach the notebook to the Spark pool created above.
60 |
61 | - In the first cell, update the values of `sqldbLinkedServiceName` and `kustolinkedServiceName` to the linked service names created above
62 |
63 | - In the second cell, set `oeeDate` to a date that has telemetry data in Data Explorer.
64 |
65 | - Run both cells
66 |
67 | - Open the SQL Database created above and verify the data in the `OEE` table
68 |
69 | ## Visualize OEE in Power BI
70 |
71 | - Open the [powerbi/oee.pbix](powerbi/oee.pbix) file and change the `Data Source settings` to connect to the SQL Database created above.
72 |
73 |
74 |
75 |
76 |
77 |
78 | ## Additional Resources
79 |
80 | - Update and rebuild the package:
81 | - `cd package`
82 | - `pip install wheel setuptools`
83 | - `python setup.py bdist_wheel`
84 |
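85 | ## OEE Formulas Used
86 |
87 | For reference, the metrics computed by the engine (see [notebook/debugOEE.ipynb](notebook/debugOEE.ipynb), where each is expressed as a percentage) follow the standard OEE definitions:
88 |
89 | - `Quality = QuantityOut / (QuantityOut + QuantityScraped)`
90 | - `Availability = (PlannedProductionTime - Downtime) / PlannedProductionTime`
91 | - `Performance = ((QuantityOut + QuantityScraped) * IdealCycleTime) / PlannedProductionTime`
92 | - `OEE = Availability * Performance * Quality`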
--------------------------------------------------------------------------------
/3_OEECalculationEngine/notebook/CalculateOEE.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "source": [
6 | "# https://docs.microsoft.com/en-us/azure/synapse-analytics/spark/microsoft-spark-utilities?pivots=programming-language-python\r\n",
7 | "# mssparkutils.credentials.help()\r\n",
8 | "\r\n",
9 | "import json\r\n",
10 | "from azure.kusto.data import KustoClient, KustoConnectionStringBuilder\r\n",
11 | "\r\n",
12 | "sqldbLinkedServiceName = 'iiotmfgsqldb'\r\n",
13 | "sqlConnection = json.loads(mssparkutils.credentials.getPropertiesAll(sqldbLinkedServiceName))\r\n",
14 | "server = sqlConnection[\"Endpoint\"]\r\n",
15 | "database = sqlConnection[\"Database\"]\r\n",
16 | "username = sqlConnection[\"Id\"]\r\n",
17 | "password = sqlConnection[\"AuthKey\"]\r\n",
18 | "driver= '{ODBC Driver 17 for SQL Server}'\r\n",
19 | "sqlConnectionString = 'DRIVER='+driver+';SERVER=tcp:'+server+';PORT=1433;DATABASE='+database+';UID='+username+';PWD='+ password\r\n",
20 | "#print(sqlConnectionString)\r\n",
21 | "\r\n",
22 | "kustolinkedServiceName = 'iiotmfgdev'\r\n",
23 | "kustoConnection = json.loads(mssparkutils.credentials.getPropertiesAll(kustolinkedServiceName))\r\n",
24 | "kustoConnectionString = KustoConnectionStringBuilder.with_aad_application_token_authentication(kustoConnection[\"Endpoint\"],kustoConnection[\"AuthKey\"])\r\n",
25 | "kustodb = kustoConnection[\"Database\"]\r\n",
26 | "#print(kustoConnectionString)"
27 | ],
28 | "outputs": [],
29 | "execution_count": null,
30 | "metadata": {
31 | "jupyter": {
32 | "source_hidden": false,
33 | "outputs_hidden": false
34 | },
35 | "nteract": {
36 | "transient": {
37 | "deleting": false
38 | }
39 | }
40 | }
41 | },
42 | {
43 | "cell_type": "code",
44 | "source": [
45 | "from manufacturingmetrics.oee import OEE\r\n",
46 | "\r\n",
47 | "oeeDate = \"2022-07-01\"\r\n",
48 | "oee = OEE()\r\n",
49 | "oeedf = oee.calculateOEE(oeeDate,sqlConnectionString, kustodb, kustoConnectionString)\r\n",
50 | "oee.saveOEE(oeeDate, oeedf, sqlConnectionString)"
51 | ],
52 | "outputs": [],
53 | "execution_count": null,
54 | "metadata": {
55 | "jupyter": {
56 | "source_hidden": false,
57 | "outputs_hidden": false
58 | },
59 | "nteract": {
60 | "transient": {
61 | "deleting": false
62 | }
63 | }
64 | }
65 | }
66 | ],
67 | "metadata": {
68 | "kernelspec": {
69 | "name": "synapse_pyspark",
70 | "language": "Python",
71 | "display_name": "Synapse PySpark"
72 | },
73 | "language_info": {
74 | "name": "python"
75 | },
76 | "kernel_info": {
77 | "name": "synapse_pyspark"
78 | },
79 | "description": null,
80 | "save_output": true,
81 | "synapse_widget": {
82 | "version": "0.1",
83 | "state": {}
84 | }
85 | },
86 | "nbformat": 4,
87 | "nbformat_minor": 2
88 | }
--------------------------------------------------------------------------------
/3_OEECalculationEngine/notebook/debugOEE.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "# https://docs.microsoft.com/en-us/azure/synapse-analytics/spark/microsoft-spark-utilities?pivots=programming-language-python\n",
10 | "# mssparkutils.credentials.help()\n",
11 | "\n",
12 | "import json\n",
13 | "from azure.kusto.data import KustoClient, KustoConnectionStringBuilder\n",
14 | "\n",
15 | "sqldbLinkedServiceName = 'iiotmfgsqldb'\n",
16 | "sqlConnection = json.loads(mssparkutils.credentials.getPropertiesAll(sqldbLinkedServiceName))\n",
17 | "server = sqlConnection[\"Endpoint\"]\n",
18 | "database = sqlConnection[\"Database\"]\n",
19 | "username = sqlConnection[\"Id\"]\n",
20 | "password = sqlConnection[\"AuthKey\"]\n",
21 | "driver= '{ODBC Driver 17 for SQL Server}'\n",
22 | "sqlConnectionString = 'DRIVER='+driver+';SERVER=tcp:'+server+';PORT=1433;DATABASE='+database+';UID='+username+';PWD='+ password\n",
23 | "#print(sqlConnectionString)\n",
24 | "\n",
25 | "kustolinkedServiceName = 'iiotmfgdev'\n",
26 | "kustoConnection = json.loads(mssparkutils.credentials.getPropertiesAll(kustolinkedServiceName))\n",
27 | "kustoConnectionString = KustoConnectionStringBuilder.with_aad_application_token_authentication(kustoConnection[\"Endpoint\"],kustoConnection[\"AuthKey\"])\n",
28 | "kustodb = kustoConnection[\"Database\"]\n"
29 | ]
30 | },
31 | {
32 | "cell_type": "markdown",
33 | "metadata": {
34 | "nteract": {
35 | "transient": {
36 | "deleting": false
37 | }
38 | }
39 | },
40 | "source": [
41 | "# OEE Date"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": null,
47 | "metadata": {
48 | "jupyter": {
49 | "outputs_hidden": false,
50 | "source_hidden": false
51 | },
52 | "nteract": {
53 | "transient": {
54 | "deleting": false
55 | }
56 | }
57 | },
58 | "outputs": [],
59 | "source": [
60 | "oeeDate = \"2022-10-25\""
61 | ]
62 | },
63 | {
64 | "cell_type": "markdown",
65 | "metadata": {
66 | "nteract": {
67 | "transient": {
68 | "deleting": false
69 | }
70 | }
71 | },
72 | "source": [
73 | "# Get MES Data and Calculate Quality"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": null,
79 | "metadata": {
80 | "jupyter": {
81 | "outputs_hidden": false,
82 | "source_hidden": false
83 | },
84 | "nteract": {
85 | "transient": {
86 | "deleting": false
87 | }
88 | }
89 | },
90 | "outputs": [],
91 | "source": [
92 | "import json\n",
93 | "from typing import List\n",
94 | "import pyodbc\n",
95 | "import urllib\n",
96 | "import pandas as pd\n",
97 | "import os\n",
98 | "from datetime import datetime, timedelta\n",
99 | "from typing import List\n",
100 | "from azure.kusto.data import KustoClient, KustoConnectionStringBuilder\n",
101 | "from azure.kusto.data.exceptions import KustoServiceError\n",
102 | "from azure.kusto.data.helpers import dataframe_from_result_table\n",
103 | "import os\n",
104 | "\n",
105 | "mesOeeDate = \"2022-06-30\" # overriding this as the the sample MES data is only available for this date\n",
106 | "productQualityQuery = f\"\"\"\n",
107 | " select \n",
108 | " l.Id as PlantId, l.PlantName, l.UtcOffsetInHours, \n",
109 | " a.Id as AssetId, a.AssetName,ag.NodeId, ag.StatusTagName, ag.UptimeTagValues, ag.DowntimeTagValues,\n",
110 | " s.Id as ShiftId, s.ShiftName, s.ShiftStartTime, s.ShiftEndTime,\n",
111 | " p.Id as ProductId, p.ProductName, p.IdealProductionUnitsPerMinute, pq.WorkOrder, pq.QuantityIn, pq.QuantityOut, pq.QuantityScraped\n",
112 | " from \n",
113 | " [Assets] as a, \n",
114 | " Locations as l, \n",
115 | " AssetTags as ag, \n",
116 | " Shifts as s,\n",
117 | " Products as p,\n",
118 | " ProductQuality as pq\n",
119 | " where \n",
120 | " a.PlantId = l.Id and \n",
121 | " ag.AssetId = a.Id and \n",
122 | " pq.ShiftId = s.Id and\n",
123 | " pq.AssetId = a.Id and\n",
124 | " p.Id = pq.ProductId and\n",
125 | " pq.CreatedTimeStamp = '{mesOeeDate}'\n",
126 | " order by l.Id, a.Id\n",
127 | "\"\"\" \n",
128 | "plannedDownTimeQuery = f\"\"\"\n",
129 | " select \n",
130 | " ShiftId, sum(PlannedDownTimeInMinutes) as PlannedDownTimeInMinutes\n",
131 | " from ShiftPlannedDownTime\n",
132 | " where CreatedTimeStamp = '{mesOeeDate}'\n",
133 | " group by ShiftId\n",
134 | "\"\"\"\n",
135 | "mesdf = {}\n",
136 | "with pyodbc.connect(sqlConnectionString) as conn:\n",
137 | " with conn.cursor() as cursor:\n",
138 | " qualitydf = pd.read_sql(productQualityQuery, conn)\n",
139 | " downtimedf = pd.read_sql(plannedDownTimeQuery, conn)\n",
140 | " mesdf = pd.merge(qualitydf, downtimedf, how=\"left\",left_on = 'ShiftId', right_on = 'ShiftId')\n",
141 | "\n",
142 | "mesdf[\"Quality\"] = (mesdf[\"QuantityOut\"] / (mesdf[\"QuantityOut\"] + mesdf[\"QuantityScraped\"]) ) * 100\n",
143 | "mesdf.head()"
144 | ]
145 | },
146 | {
147 | "cell_type": "markdown",
148 | "metadata": {
149 | "nteract": {
150 | "transient": {
151 | "deleting": false
152 | }
153 | }
154 | },
155 | "source": [
156 | "# Calculate Availability"
157 | ]
158 | },
159 | {
160 | "cell_type": "code",
161 | "execution_count": null,
162 | "metadata": {
163 | "jupyter": {
164 | "outputs_hidden": false,
165 | "source_hidden": false
166 | },
167 | "nteract": {
168 | "transient": {
169 | "deleting": false
170 | }
171 | }
172 | },
173 | "outputs": [],
174 | "source": [
175 | "client = KustoClient(kustoConnectionString)\n",
176 | "\n",
177 | "mesdf = mesdf.reset_index()\n",
178 | "telemetrydf = pd.DataFrame(columns = ['MachineStatus', 'TotalDurationInMinutes','ShiftId','AssetId'])\n",
179 | "for index, row in mesdf.iterrows():\n",
180 | " startDateTime = datetime.strptime(oeeDate + \" \" + row[\"ShiftStartTime\"].strftime('%H:%M:%S'), '%Y-%m-%d %H:%M:%S') + timedelta(hours=row['UtcOffsetInHours'])\n",
181 | " endDateTime = datetime.strptime(oeeDate + \" \" + row[\"ShiftEndTime\"].strftime('%H:%M:%S'), '%Y-%m-%d %H:%M:%S') + timedelta(hours=row['UtcOffsetInHours'])\n",
182 | " kustoQuery = f\"\"\"\n",
183 | " let _startTime = datetime({startDateTime.strftime('%Y-%m-%d %H:%M:%S')});\n",
184 | " let _endTime = datetime({endDateTime.strftime('%Y-%m-%d %H:%M:%S')});\n",
185 | " telemetry\n",
186 | " | where ExpandedNodeId == \"{row['StatusTagName']}\"\n",
187 | " | where SourceTimestamp >= _startTime and SourceTimestamp <= _endTime\n",
188 | " | order by SourceTimestamp asc\n",
189 | " | extend prevValue = prev(Value), prevTimestamp = prev(SourceTimestamp,1, _startTime)\n",
190 | " | where prevValue != todouble(Value)\n",
191 | " | project ExpandedNodeId, Value, prevTimestamp, SourceTimestamp\n",
192 | " | extend nextValue = next(Value), nextTimestamp = next(prevTimestamp,1, _endTime)\n",
193 | " | where nextValue != todouble(Value)\n",
194 | " | project Duration = todouble(datetime_diff(\"second\",nextTimestamp,prevTimestamp)), MachineStatus = tostring(Value)\n",
195 | " | summarize sum(Duration) by MachineStatus\n",
196 | " | project MachineStatus, TotalDurationInMinutes = round(sum_Duration / 60)\n",
197 | " \"\"\"\n",
198 | " #print(kustoQuery)\n",
199 | " queryResult = client.execute(kustodb, kustoQuery) \n",
200 | " currentdf = dataframe_from_result_table(queryResult.primary_results[0])\n",
201 | " currentdf['ShiftId'] = row['ShiftId']\n",
202 | " currentdf['AssetId'] = row['AssetId']\n",
203 | " currentdf['TimeStatus'] = currentdf.apply(lambda x: 'UptimeMinutes' if x['MachineStatus'] in row['UptimeTagValues'] else 'DowntimeMinutes', axis=1)\n",
204 | " print(currentdf.shape)\n",
205 | " telemetrydf = telemetrydf.append(currentdf,ignore_index=True)\n",
206 | "\n",
207 | "telemetrydf = telemetrydf.groupby(['AssetId', 'ShiftId', 'TimeStatus'])['TotalDurationInMinutes'].sum().reset_index()\n",
208 | "machinestatusdf = telemetrydf.pivot_table('TotalDurationInMinutes', ['AssetId', 'ShiftId'], 'TimeStatus')\n",
209 | "\n",
210 | "machinestatusdf.head()"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": null,
216 | "metadata": {
217 | "jupyter": {
218 | "outputs_hidden": false,
219 | "source_hidden": false
220 | },
221 | "nteract": {
222 | "transient": {
223 | "deleting": false
224 | }
225 | }
226 | },
227 | "outputs": [],
228 | "source": [
229 | "oeedf = pd.merge(mesdf, machinestatusdf, how=\"left\",left_on = ['ShiftId', 'AssetId'], right_on = ['ShiftId', 'AssetId'])\n",
230 | "oeedf['TotalProductionTimeInMinutes'] = round((pd.to_datetime(oeedf['ShiftEndTime'],format='%H:%M:%S') - pd.to_datetime(oeedf['ShiftStartTime'],format='%H:%M:%S')).dt.total_seconds() / 60)\n",
231 | "oeedf['PlannedProductionTimeInMinutes'] = oeedf['TotalProductionTimeInMinutes'] - oeedf['PlannedDownTimeInMinutes']\n",
232 | "oeedf['Availability'] = ((oeedf['PlannedProductionTimeInMinutes'] - oeedf['DowntimeMinutes']) / oeedf['PlannedProductionTimeInMinutes']) * 100\n",
233 | "\n",
234 | "# Calculate Performance\n",
235 | "oeedf['CycleTimeInMinutes'] = (1 / oeedf['IdealProductionUnitsPerMinute'])\n",
236 | "oeedf['Performance'] = (((oeedf['QuantityOut'] + oeedf['QuantityScraped']) * oeedf['CycleTimeInMinutes']) / oeedf['PlannedProductionTimeInMinutes']) * 100\n",
237 | "\n",
238 | "# Calculate OEE\n",
239 | "oeedf['OEE'] = ((oeedf['Availability']/100) * (oeedf['Performance']/100) * (oeedf['Quality']/100)) * 100\n",
240 | "\n",
241 | "# Calculate OEE Losses\n",
242 | "oeedf['AvailabilityLoss'] = ((oeedf['DowntimeMinutes'] / oeedf['CycleTimeInMinutes']) / ( oeedf['PlannedProductionTimeInMinutes'] / oeedf['CycleTimeInMinutes'])) * 100\n",
243 | "oeedf['QualityLoss'] = (oeedf['QuantityScraped'] / ( oeedf['PlannedProductionTimeInMinutes'] / oeedf['CycleTimeInMinutes'])) * 100\n",
244 | "oeedf['SpeedLoss'] = 100 - oeedf['AvailabilityLoss'] - oeedf['QualityLoss'] - oeedf['OEE']\n",
245 | "\n",
246 | "oeedf.head()"
247 | ]
248 | }
249 | ],
250 | "metadata": {
251 | "kernel_info": {
252 | "name": "synapse_pyspark"
253 | },
254 | "kernelspec": {
255 | "display_name": "Python 3.9.13 64-bit (microsoft store)",
256 | "language": "python",
257 | "name": "python3"
258 | },
259 | "language_info": {
260 | "name": "python",
261 | "version": "3.9.13"
262 | },
263 | "save_output": true,
264 | "synapse_widget": {
265 | "state": {},
266 | "version": "0.1"
267 | },
268 | "vscode": {
269 | "interpreter": {
270 | "hash": "e26c73b246f701fd7ac930ed79dba21d807638bf330302ee46d3692ef1c54949"
271 | }
272 | }
273 | },
274 | "nbformat": 4,
275 | "nbformat_minor": 2
276 | }
277 |
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/PACKAGEDETAILS.md:
--------------------------------------------------------------------------------
1 | # Calculate OEE
2 |
3 | - Calculates OEE based on given configuration parameters as shown below:
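4 | 
5 | A minimal usage sketch, mirroring `test.py` in this package (the connection values below are placeholders you must supply):
6 | 
7 | ```python
8 | from azure.kusto.data import KustoConnectionStringBuilder
9 | from manufacturingmetrics.oee import OEE
10 | 
11 | # Placeholder connection settings - see sample_env for the full list
12 | sqlConnectionString = (
13 |     "DRIVER={ODBC Driver 17 for SQL Server};SERVER=tcp:<server>;PORT=1433;"
14 |     "DATABASE=<db>;UID=<user>;PWD=<password>"
15 | )
16 | kustoConnectionString = KustoConnectionStringBuilder.with_aad_application_key_authentication(
17 |     "<cluster-url>", "<app-id>", "<app-secret>", "<tenant-id>")
18 | 
19 | oee = OEE()
20 | oeedf = oee.calculateOEE("2022-06-30", sqlConnectionString, "<kusto-db>", kustoConnectionString)
21 | oee.saveOEE("2022-06-30", oeedf, sqlConnectionString)
22 | ```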
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/dist/manufacturingmetrics-0.1.0-py3-none-any.whl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/3_OEECalculationEngine/package/dist/manufacturingmetrics-0.1.0-py3-none-any.whl
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics.egg-info/PKG-INFO:
--------------------------------------------------------------------------------
1 | Metadata-Version: 2.1
2 | Name: manufacturingmetrics
3 | Version: 0.1.0
4 | Summary: Package to calculate manufacturing performance metrics and KPIs like OEE
5 | Home-page: UNKNOWN
6 | Author: Jomit Vaghela
7 | Author-email:
8 | License: UNKNOWN
9 | Platform: UNKNOWN
10 | Classifier: Programming Language :: Python :: 3
11 | Classifier: License :: OSI Approved :: MIT License
12 | Classifier: Operating System :: OS Independent
13 | Requires-Python: >=3.7
14 | Description-Content-Type: text/markdown
15 |
16 | # Calculate OEE
17 |
18 | - Calculates OEE based on given configuration parameters as shown below:
19 |
20 |
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
1 | setup.py
2 | manufacturingmetrics/__init__.py
3 | manufacturingmetrics/oee.py
4 | manufacturingmetrics.egg-info/PKG-INFO
5 | manufacturingmetrics.egg-info/SOURCES.txt
6 | manufacturingmetrics.egg-info/dependency_links.txt
7 | manufacturingmetrics.egg-info/not-zip-safe
8 | manufacturingmetrics.egg-info/top_level.txt
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics.egg-info/not-zip-safe:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | manufacturingmetrics
2 |
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018 The Python Packaging Authority
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """Top-level package for manufacturingmetrics."""
4 |
5 | __author__ = """Jomit Vaghela"""
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/manufacturingmetrics/oee.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import List
3 | import pyodbc
4 | import urllib
5 | import pandas as pd
6 | import os
7 | from datetime import datetime, timedelta
8 | from typing import List
9 | from azure.kusto.data import KustoClient, KustoConnectionStringBuilder
10 | from azure.kusto.data.exceptions import KustoServiceError
11 | from azure.kusto.data.helpers import dataframe_from_result_table
12 | import os
13 |
14 | class OEE():
15 |
16 | def __init__(self):
17 | super().__init__()
18 |
19 | def __getMESData(self, sqlDbConnectionString, oeeDate):
20 |         oeeDate = "2022-06-30"  # overriding this, as the sample MES data is only available for this date
21 | productQualityQuery = f"""
22 | select
23 | l.Id as PlantId, l.PlantName, l.UtcOffsetInHours,
24 | a.Id as AssetId, a.AssetName,ag.NodeId, ag.StatusTagName, ag.UptimeTagValues, ag.DowntimeTagValues,
25 | s.Id as ShiftId, s.ShiftName, s.ShiftStartTime, s.ShiftEndTime,
26 | p.Id as ProductId, p.ProductName, p.IdealProductionUnitsPerMinute, pq.WorkOrder, pq.QuantityIn, pq.QuantityOut, pq.QuantityScraped
27 | from
28 | [Assets] as a,
29 | Locations as l,
30 | AssetTags as ag,
31 | Shifts as s,
32 | Products as p,
33 | ProductQuality as pq
34 | where
35 | a.PlantId = l.Id and
36 | ag.AssetId = a.Id and
37 | pq.ShiftId = s.Id and
38 | pq.AssetId = a.Id and
39 | p.Id = pq.ProductId and
40 | pq.CreatedTimeStamp = '{oeeDate}'
41 | order by l.Id, a.Id
42 | """
43 | plannedDownTimeQuery = f"""
44 | select
45 | ShiftId, sum(PlannedDownTimeInMinutes) as PlannedDownTimeInMinutes
46 | from ShiftPlannedDownTime
47 | where CreatedTimeStamp = '{oeeDate}'
48 | group by ShiftId
49 | """
50 |
51 | with pyodbc.connect(sqlDbConnectionString) as conn:
52 | with conn.cursor() as cursor:
53 | qualitydf = pd.read_sql(productQualityQuery, conn)
54 | downtimedf = pd.read_sql(plannedDownTimeQuery, conn)
55 | return pd.merge(qualitydf, downtimedf, how="left",left_on = 'ShiftId', right_on = 'ShiftId')
56 |
57 | def __getMachineStatusData(self, kustodb, kustoConnectionString, oeeDate, mesdf):
58 | client = KustoClient(kustoConnectionString)
59 |
60 | mesdf = mesdf.reset_index()
61 | telemetrydf = pd.DataFrame(columns = ['MachineStatus', 'TotalDurationInMinutes','ShiftId','AssetId'])
62 | for index, row in mesdf.iterrows():
63 | startDateTime = datetime.strptime(oeeDate + " " + row["ShiftStartTime"].strftime('%H:%M:%S'), '%Y-%m-%d %H:%M:%S') + timedelta(hours=row['UtcOffsetInHours'])
64 | endDateTime = datetime.strptime(oeeDate + " " + row["ShiftEndTime"].strftime('%H:%M:%S'), '%Y-%m-%d %H:%M:%S') + timedelta(hours=row['UtcOffsetInHours'])
65 | kustoQuery = f"""
66 | let _startTime = datetime({startDateTime.strftime('%Y-%m-%d %H:%M:%S')});
67 | let _endTime = datetime({endDateTime.strftime('%Y-%m-%d %H:%M:%S')});
68 | telemetry
69 | | where ExpandedNodeId == "{row['StatusTagName']}"
70 | | where SourceTimestamp >= _startTime and SourceTimestamp <= _endTime
71 | | order by SourceTimestamp asc
72 | | extend prevValue = prev(Value), prevTimestamp = prev(SourceTimestamp,1, _startTime)
73 | | where prevValue != todouble(Value)
74 | | project ExpandedNodeId, Value, prevTimestamp, SourceTimestamp
75 | | extend nextValue = next(Value), nextTimestamp = next(prevTimestamp,1, _endTime)
76 | | where nextValue != todouble(Value)
77 | | project Duration = todouble(datetime_diff("second",nextTimestamp,prevTimestamp)), MachineStatus = tostring(Value)
78 | | summarize sum(Duration) by MachineStatus
79 | | project MachineStatus, TotalDurationInMinutes = round(sum_Duration / 60)
80 | """
81 | #print(kustoQuery)
82 | queryResult = client.execute(kustodb, kustoQuery)
83 | currentdf = dataframe_from_result_table(queryResult.primary_results[0])
84 | currentdf['ShiftId'] = row['ShiftId']
85 | currentdf['AssetId'] = row['AssetId']
86 | currentdf['TimeStatus'] = currentdf.apply(lambda x: 'UptimeMinutes' if x['MachineStatus'] in row['UptimeTagValues'] else 'DowntimeMinutes', axis=1)
87 |             telemetrydf = pd.concat([telemetrydf, currentdf], ignore_index=True)
88 |
89 | telemetrydf = telemetrydf.groupby(['AssetId', 'ShiftId', 'TimeStatus'])['TotalDurationInMinutes'].sum().reset_index()
90 | telemetrydf = telemetrydf.pivot_table('TotalDurationInMinutes', ['AssetId', 'ShiftId'], 'TimeStatus')
91 | return telemetrydf
92 |
93 | def calculateOEE(self, oeeDate, sqlConnectionString, kustodb, kustoConnectionString):
94 |
95 | # Get MES Data and Calculate Quality
96 | mesdf = self.__getMESData(sqlConnectionString,oeeDate)
97 | mesdf["Quality"] = (mesdf["QuantityOut"] / (mesdf["QuantityOut"] + mesdf["QuantityScraped"]) ) * 100
98 |
99 | # Calculate Availability
100 | machinestatusdf = self.__getMachineStatusData(kustodb, kustoConnectionString, oeeDate, mesdf)
101 | oeedf = pd.merge(mesdf, machinestatusdf, how="left",left_on = ['ShiftId', 'AssetId'], right_on = ['ShiftId', 'AssetId'])
102 | oeedf['TotalProductionTimeInMinutes'] = round((pd.to_datetime(oeedf['ShiftEndTime'],format='%H:%M:%S') - pd.to_datetime(oeedf['ShiftStartTime'],format='%H:%M:%S')).dt.total_seconds() / 60)
103 | oeedf['PlannedProductionTimeInMinutes'] = oeedf['TotalProductionTimeInMinutes'] - oeedf['PlannedDownTimeInMinutes']
104 | oeedf['Availability'] = ((oeedf['PlannedProductionTimeInMinutes'] - oeedf['DowntimeMinutes']) / oeedf['PlannedProductionTimeInMinutes']) * 100
105 |
106 | # Calculate Performance
107 | oeedf['CycleTimeInMinutes'] = (1 / oeedf['IdealProductionUnitsPerMinute'])
108 | oeedf['Performance'] = (((oeedf['QuantityOut'] + oeedf['QuantityScraped']) * oeedf['CycleTimeInMinutes']) / oeedf['PlannedProductionTimeInMinutes']) * 100
109 |
110 | # Calculate OEE
111 | oeedf['OEE'] = ((oeedf['Availability']/100) * (oeedf['Performance']/100) * (oeedf['Quality']/100)) * 100
112 |
113 | # Calculate OEE Losses
114 | oeedf['AvailabilityLoss'] = ((oeedf['DowntimeMinutes'] / oeedf['CycleTimeInMinutes']) / ( oeedf['PlannedProductionTimeInMinutes'] / oeedf['CycleTimeInMinutes'])) * 100
115 | oeedf['QualityLoss'] = (oeedf['QuantityScraped'] / ( oeedf['PlannedProductionTimeInMinutes'] / oeedf['CycleTimeInMinutes'])) * 100
116 | oeedf['SpeedLoss'] = 100 - oeedf['AvailabilityLoss'] - oeedf['QualityLoss'] - oeedf['OEE']
117 | return oeedf
118 |
119 | def saveOEE(self, oeeDate, oeedf, sqlConnectionString):
120 | with pyodbc.connect(sqlConnectionString) as conn:
121 | with conn.cursor() as cursor:
122 | for index, row in oeedf.iterrows():
123 | insertQuery = f"""
124 | INSERT INTO [dbo].[OEE]
125 | ([PlantId],[AssetId],[ShiftId],[ProductId],[WorkOrder]
126 | ,[TotalUnits],[GoodUnits],[ScrapedUnits],[Quality]
127 | ,[PlannedDownTimeInMinutes],[DowntimeMinutes],[UptimeMinutes]
128 | ,[TotalProductionTimeInMinutes],[PlannedProductionTimeInMinutes]
129 | ,[Availability],[CycleTimeInMinutes],[Performance],[OEE]
130 | ,[AvailabilityLoss],[QualityLoss],[SpeedLoss],[OEEDate])
131 | VALUES
132 | ({row.PlantId},{row.AssetId},{row.ShiftId},{row.ProductId},'{row.WorkOrder}'
133 | ,{row.QuantityIn},{row.QuantityOut},{row.QuantityScraped},{row.Quality}
134 | ,{row.PlannedDownTimeInMinutes},{row.DowntimeMinutes},{row.UptimeMinutes}
135 | ,{row.TotalProductionTimeInMinutes},{row.PlannedProductionTimeInMinutes}
136 | ,{row.Availability},{row.CycleTimeInMinutes},{row.Performance},{row.OEE}
137 | ,{row.AvailabilityLoss},{row.QualityLoss},{row.SpeedLoss},'{oeeDate}')
138 | """
139 | #print(insertQuery)
140 | cursor.execute(insertQuery)
141 | conn.commit()
142 |
143 |
144 |
145 |
146 |
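147 | # Worked example (a sketch with one assumed figure) of the math in calculateOEE,
148 | # using the sample MES data for WorkOrder WB001 (QuantityOut=375, QuantityScraped=25,
149 | # shift length 480 min, PlannedDownTimeInMinutes=50, IdealProductionUnitsPerMinute=1)
150 | # and an assumed 30 minutes of telemetry-derived downtime. (The Kusto query above
151 | # derives such downtime by pairing each status-tag change with the previous/next
152 | # change timestamps and summing the resulting durations per status.)
153 | #
154 | #   PlannedProductionTime = 480 - 50 = 430 min
155 | #   Quality      = 375 / (375 + 25) * 100            = 93.75 %
156 | #   Availability = (430 - 30) / 430 * 100            ~ 93.02 %
157 | #   Performance  = (375 + 25) * (1 / 1) / 430 * 100  ~ 93.02 %
158 | #   OEE          = 0.9302 * 0.9302 * 0.9375 * 100    ~ 81.13 %
159 | #
160 | # The loss terms partition the remaining 100 - 81.13 ~ 18.87 points:
161 | #   AvailabilityLoss = 30 / 430 * 100 ~ 6.98
162 | #   QualityLoss      = 25 / 430 * 100 ~ 5.81
163 | #   SpeedLoss        = 100 - 6.98 - 5.81 - 81.13 ~ 6.08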
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/requirements.txt:
--------------------------------------------------------------------------------
1 | pyodbc==4.0.30
2 | azure-kusto-data==2.1.1
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/sample_env:
--------------------------------------------------------------------------------
1 | kusto_db_name=""
2 | kusto_aad_tenant_id=""
3 | kusto_cluster_url="https://.kusto.windows.net/"
4 | kusto_app_id=""
5 | kusto_app_secret=""
6 |
7 | sql_server=".database.windows.net"
8 | sql_db_name=""
9 | sql_username=""
10 | sql_password=""
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 |
3 | with open("PACKAGEDETAILS.md", "r") as fh:
4 | long_description = fh.read()
5 |
6 | setuptools.setup(
7 | name="manufacturingmetrics",
8 | version="0.1.0",
9 | author="Jomit Vaghela",
10 | author_email="",
11 | description="Package to calculate manufacturing performance metrics and KPIs like OEE",
12 | long_description=long_description,
13 | long_description_content_type="text/markdown",
14 | packages=setuptools.find_packages(),
15 | classifiers=[
16 | "Programming Language :: Python :: 3",
17 | "License :: OSI Approved :: MIT License",
18 | "Operating System :: OS Independent",
19 | ],
20 | python_requires='>=3.7',
21 | zip_safe=False
22 | )
--------------------------------------------------------------------------------
/3_OEECalculationEngine/package/test.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import List
3 | import pandas as pd
4 | import os
5 | from azure.kusto.data import KustoClient, KustoConnectionStringBuilder
6 | from azure.kusto.data.exceptions import KustoServiceError
7 | from azure.kusto.data.helpers import dataframe_from_result_table
8 | from manufacturingmetrics.oee import OEE
9 | from dotenv import load_dotenv
10 | load_dotenv()
11 |
12 | server = os.getenv("sql_server")
13 | database = os.getenv("sql_db_name")
14 | username = os.getenv("sql_username")
15 | password = os.getenv("sql_password")
16 | driver= '{ODBC Driver 17 for SQL Server}'
17 | sqlConnectionString = 'DRIVER='+driver+';SERVER=tcp:'+server+';PORT=1433;DATABASE='+database+';UID='+username+';PWD='+ password
18 |
19 | aadTenantId = os.getenv("kusto_aad_tenant_id")
20 | cluster = os.getenv("kusto_cluster_url")
21 | appId = os.getenv("kusto_app_id")
22 | appSecret = os.getenv("kusto_app_secret")
23 | kustodb = os.getenv("kusto_db_name")
24 | kustoConnectionString = KustoConnectionStringBuilder.with_aad_application_key_authentication(cluster,appId,appSecret,aadTenantId)
25 |
26 | oeeDate = "2022-07-04"
27 | oee = OEE()
28 | oeedf = oee.calculateOEE (oeeDate,sqlConnectionString, kustodb, kustoConnectionString)
29 | oee.saveOEE(oeeDate, oeedf, sqlConnectionString)
30 | oeedf.to_csv("oeedf.csv")
31 | print(oeedf.head())
32 |
33 | # import pkg_resources
34 | # for d in pkg_resources.working_set:
35 | # print(d)
36 |
37 | #configJson = json.loads('{ "oeeDate": "2022-06-19" }')
--------------------------------------------------------------------------------
/3_OEECalculationEngine/powerbi/oee.pbix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/3_OEECalculationEngine/powerbi/oee.pbix
--------------------------------------------------------------------------------
/3_OEECalculationEngine/powerbi/samplequery.sql:
--------------------------------------------------------------------------------
1 | SELECT l.PlantName
2 | ,l.PlantLocation
3 | ,a.AssetName
4 | ,s.ShiftName
5 | ,p.ProductName
6 | ,WorkOrder
7 | ,TotalUnits
8 | ,GoodUnits
9 | ,ScrapedUnits
10 | ,Quality
11 | ,PlannedDownTimeInMinutes
12 | ,DowntimeMinutes
13 | ,UptimeMinutes
14 | ,TotalProductionTimeInMinutes
15 | ,PlannedProductionTimeInMinutes
16 | ,Availability
17 | ,CycleTimeInMinutes
18 | ,Performance
19 | ,OEE
20 | ,AvailabilityLoss
21 | ,QualityLoss
22 | ,SpeedLoss
23 | ,OEEDate
24 | FROM OEE as oee, Locations as l, Assets as a, Products as p, Shifts as s
25 | WHERE oee.PlantId = l.Id and oee.AssetId = a.Id and oee.ShiftId = s.Id and oee.ProductId = p.Id
26 |
--------------------------------------------------------------------------------
/3_OEECalculationEngine/sqldb/mes-reporting.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE [dbo].[OEE] DROP CONSTRAINT [DF__OEE__CreatedTime__17036CC0]
2 | GO
3 | /****** Object: Table [dbo].[Shifts] Script Date: 7/6/2022 5:39:31 PM ******/
4 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[Shifts]') AND type in (N'U'))
5 | DROP TABLE [dbo].[Shifts]
6 | GO
7 | /****** Object: Table [dbo].[ShiftPlannedDownTime] Script Date: 7/6/2022 5:39:31 PM ******/
8 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[ShiftPlannedDownTime]') AND type in (N'U'))
9 | DROP TABLE [dbo].[ShiftPlannedDownTime]
10 | GO
11 | /****** Object: Table [dbo].[Products] Script Date: 7/6/2022 5:39:31 PM ******/
12 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[Products]') AND type in (N'U'))
13 | DROP TABLE [dbo].[Products]
14 | GO
15 | /****** Object: Table [dbo].[ProductQuality] Script Date: 7/6/2022 5:39:31 PM ******/
16 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[ProductQuality]') AND type in (N'U'))
17 | DROP TABLE [dbo].[ProductQuality]
18 | GO
19 | /****** Object: Table [dbo].[OEE] Script Date: 7/6/2022 5:39:31 PM ******/
20 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[OEE]') AND type in (N'U'))
21 | DROP TABLE [dbo].[OEE]
22 | GO
23 | /****** Object: Table [dbo].[Locations] Script Date: 7/6/2022 5:39:31 PM ******/
24 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[Locations]') AND type in (N'U'))
25 | DROP TABLE [dbo].[Locations]
26 | GO
27 | /****** Object: Table [dbo].[AssetTags] Script Date: 7/6/2022 5:39:31 PM ******/
28 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[AssetTags]') AND type in (N'U'))
29 | DROP TABLE [dbo].[AssetTags]
30 | GO
31 | /****** Object: Table [dbo].[Assets] Script Date: 7/6/2022 5:39:31 PM ******/
32 | IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[Assets]') AND type in (N'U'))
33 | DROP TABLE [dbo].[Assets]
34 | GO
35 | /****** Object: Table [dbo].[Assets] Script Date: 7/6/2022 5:39:31 PM ******/
36 | SET ANSI_NULLS ON
37 | GO
38 | SET QUOTED_IDENTIFIER ON
39 | GO
40 | CREATE TABLE [dbo].[Assets](
41 | [Id] [int] IDENTITY(1,1) NOT NULL,
42 | [PlantId] [int] NOT NULL,
43 | [AssetName] [nvarchar](100) NOT NULL
44 | ) ON [PRIMARY]
45 | GO
46 | /****** Object: Table [dbo].[AssetTags] Script Date: 7/6/2022 5:39:31 PM ******/
47 | SET ANSI_NULLS ON
48 | GO
49 | SET QUOTED_IDENTIFIER ON
50 | GO
51 | CREATE TABLE [dbo].[AssetTags](
52 | [AssetId] [int] NOT NULL,
53 | [NodeId] [nvarchar](200) NOT NULL,
54 | [StatusTagName] [nvarchar](100) NOT NULL,
55 | [UptimeTagValues] [nvarchar](100) NOT NULL,
56 | [DowntimeTagValues] [nvarchar](100) NOT NULL
57 | ) ON [PRIMARY]
58 | GO
59 | /****** Object: Table [dbo].[Locations] Script Date: 7/6/2022 5:39:31 PM ******/
60 | SET ANSI_NULLS ON
61 | GO
62 | SET QUOTED_IDENTIFIER ON
63 | GO
64 | CREATE TABLE [dbo].[Locations](
65 | [Id] [int] IDENTITY(1,1) NOT NULL,
66 | [PlantName] [nvarchar](100) NOT NULL,
67 | [PlantLocation] [nvarchar](100) NOT NULL,
68 | [UtcOffsetInHours] [int] NOT NULL
69 | ) ON [PRIMARY]
70 | GO
71 | /****** Object: Table [dbo].[OEE] Script Date: 7/6/2022 5:39:31 PM ******/
72 | SET ANSI_NULLS ON
73 | GO
74 | SET QUOTED_IDENTIFIER ON
75 | GO
76 | CREATE TABLE [dbo].[OEE](
77 | [PlantId] [int] NOT NULL,
78 | [AssetId] [int] NOT NULL,
79 | [ShiftId] [int] NOT NULL,
80 | [ProductId] [int] NOT NULL,
81 | [WorkOrder] [nvarchar](100) NOT NULL,
82 | [TotalUnits] [float] NOT NULL,
83 | [GoodUnits] [float] NOT NULL,
84 | [ScrapedUnits] [float] NOT NULL,
85 | [Quality] [float] NOT NULL,
86 | [PlannedDownTimeInMinutes] [float] NOT NULL,
87 | [DowntimeMinutes] [float] NOT NULL,
88 | [UptimeMinutes] [float] NOT NULL,
89 | [TotalProductionTimeInMinutes] [float] NOT NULL,
90 | [PlannedProductionTimeInMinutes] [float] NOT NULL,
91 | [Availability] [float] NOT NULL,
92 | [CycleTimeInMinutes] [float] NOT NULL,
93 | [Performance] [float] NOT NULL,
94 | [OEE] [float] NOT NULL,
95 | [AvailabilityLoss] [float] NOT NULL,
96 | [QualityLoss] [float] NOT NULL,
97 | [SpeedLoss] [float] NOT NULL,
98 | [OEEDate] [date] NOT NULL,
99 | [CreatedTimeStamp] [datetime2](7) NOT NULL
100 | ) ON [PRIMARY]
101 | GO
102 | /****** Object: Table [dbo].[ProductQuality] Script Date: 7/6/2022 5:39:31 PM ******/
103 | SET ANSI_NULLS ON
104 | GO
105 | SET QUOTED_IDENTIFIER ON
106 | GO
107 | CREATE TABLE [dbo].[ProductQuality](
108 | [WorkOrder] [nvarchar](100) NOT NULL,
109 | [ShiftId] [int] NOT NULL,
110 | [AssetId] [int] NOT NULL,
111 | [ProductId] [int] NOT NULL,
112 | [QuantityIn] [int] NOT NULL,
113 | [QuantityOut] [int] NOT NULL,
114 | [QuantityScraped] [int] NOT NULL,
115 | [CreatedTimeStamp] [datetime] NOT NULL
116 | ) ON [PRIMARY]
117 | GO
118 | /****** Object: Table [dbo].[Products] Script Date: 7/6/2022 5:39:31 PM ******/
119 | SET ANSI_NULLS ON
120 | GO
121 | SET QUOTED_IDENTIFIER ON
122 | GO
123 | CREATE TABLE [dbo].[Products](
124 | [Id] [int] IDENTITY(1,1) NOT NULL,
125 | [ProductName] [nvarchar](100) NOT NULL,
126 | [IdealProductionUnitsPerMinute] [int] NOT NULL
127 | ) ON [PRIMARY]
128 | GO
129 | /****** Object: Table [dbo].[ShiftPlannedDownTime] Script Date: 7/6/2022 5:39:31 PM ******/
130 | SET ANSI_NULLS ON
131 | GO
132 | SET QUOTED_IDENTIFIER ON
133 | GO
134 | CREATE TABLE [dbo].[ShiftPlannedDownTime](
135 | [Id] [int] IDENTITY(1,1) NOT NULL,
136 | [ShiftId] [int] NOT NULL,
137 | [PlannedDownTimeReason] [nvarchar](100) NOT NULL,
138 | [PlannedDownTimeInMinutes] [int] NOT NULL,
139 | [CreatedTimeStamp] [datetime] NOT NULL
140 | ) ON [PRIMARY]
141 | GO
142 | /****** Object: Table [dbo].[Shifts] Script Date: 7/6/2022 5:39:31 PM ******/
143 | SET ANSI_NULLS ON
144 | GO
145 | SET QUOTED_IDENTIFIER ON
146 | GO
147 | CREATE TABLE [dbo].[Shifts](
148 | [Id] [int] IDENTITY(1,1) NOT NULL,
149 | [PlantId] [int] NOT NULL,
150 | [ShiftName] [nvarchar](50) NOT NULL,
151 | [ShiftStartTime] [time](7) NOT NULL,
152 | [ShiftEndTime] [time](7) NOT NULL
153 | ) ON [PRIMARY]
154 | GO
155 | SET IDENTITY_INSERT [dbo].[Assets] ON
156 | GO
157 | INSERT [dbo].[Assets] ([Id], [PlantId], [AssetName]) VALUES (1, 1, N'Line1')
158 | GO
159 | INSERT [dbo].[Assets] ([Id], [PlantId], [AssetName]) VALUES (2, 1, N'Line2')
160 | GO
161 | INSERT [dbo].[Assets] ([Id], [PlantId], [AssetName]) VALUES (3, 2, N'Line3')
162 | GO
163 | INSERT [dbo].[Assets] ([Id], [PlantId], [AssetName]) VALUES (4, 2, N'Line4')
164 | GO
165 | SET IDENTITY_INSERT [dbo].[Assets] OFF
166 | GO
167 | INSERT [dbo].[AssetTags] ([AssetId], [NodeId], [StatusTagName], [UptimeTagValues], [DowntimeTagValues]) VALUES (1, N'opc.tcp://10.0.0.4:49320_b475f0d02c5268c4e3942a507df64b61abe193e9', N'nsu=KEPServerEX;s=Simulator.Line1.Status', N'1,2,3', N'4')
168 | GO
169 | INSERT [dbo].[AssetTags] ([AssetId], [NodeId], [StatusTagName], [UptimeTagValues], [DowntimeTagValues]) VALUES (2, N'opc.tcp://10.0.0.4:49320_b475f0d02c5268c4e3942a507df64b61abe193e9', N'nsu=KEPServerEX;s=Simulator.Line2.Status', N'1,2,3', N'4')
170 | GO
171 | INSERT [dbo].[AssetTags] ([AssetId], [NodeId], [StatusTagName], [UptimeTagValues], [DowntimeTagValues]) VALUES (3, N'opc.tcp://10.0.0.4:49320_b475f0d02c5268c4e3942a507df64b61abe193e9', N'nsu=KEPServerEX;s=Simulator.Line3.Status', N'1,2,3', N'4')
172 | GO
173 | INSERT [dbo].[AssetTags] ([AssetId], [NodeId], [StatusTagName], [UptimeTagValues], [DowntimeTagValues]) VALUES (4, N'opc.tcp://10.0.0.4:49320_b475f0d02c5268c4e3942a507df64b61abe193e9', N'nsu=KEPServerEX;s=Simulator.Line4.Status', N'1,2,3', N'4')
174 | GO
175 | SET IDENTITY_INSERT [dbo].[Locations] ON
176 | GO
177 | INSERT [dbo].[Locations] ([Id], [PlantName], [PlantLocation], [UtcOffsetInHours]) VALUES (1, N'Plant-1', N'North America', -7)
178 | GO
179 | INSERT [dbo].[Locations] ([Id], [PlantName], [PlantLocation], [UtcOffsetInHours]) VALUES (2, N'Plant-2', N'South America', -5)
180 | GO
181 | SET IDENTITY_INSERT [dbo].[Locations] OFF
182 | GO
183 | INSERT [dbo].[ProductQuality] ([WorkOrder], [ShiftId], [AssetId], [ProductId], [QuantityIn], [QuantityOut], [QuantityScraped], [CreatedTimeStamp]) VALUES (N'WB001', 1, 1, 1, 400, 375, 25, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
184 | GO
185 | INSERT [dbo].[ProductQuality] ([WorkOrder], [ShiftId], [AssetId], [ProductId], [QuantityIn], [QuantityOut], [QuantityScraped], [CreatedTimeStamp]) VALUES (N'WB002', 2, 2, 2, 800, 770, 30, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
186 | GO
187 | INSERT [dbo].[ProductQuality] ([WorkOrder], [ShiftId], [AssetId], [ProductId], [QuantityIn], [QuantityOut], [QuantityScraped], [CreatedTimeStamp]) VALUES (N'WB003', 3, 3, 3, 1185, 1180, 5, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
188 | GO
189 | INSERT [dbo].[ProductQuality] ([WorkOrder], [ShiftId], [AssetId], [ProductId], [QuantityIn], [QuantityOut], [QuantityScraped], [CreatedTimeStamp]) VALUES (N'WB004', 1, 4, 1, 400, 380, 20, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
190 | GO
191 | INSERT [dbo].[ProductQuality] ([WorkOrder], [ShiftId], [AssetId], [ProductId], [QuantityIn], [QuantityOut], [QuantityScraped], [CreatedTimeStamp]) VALUES (N'WB005', 2, 4, 3, 1180, 1175, 5, CAST(N'2022-07-01T00:00:00.000' AS DateTime))
192 | GO
193 | SET IDENTITY_INSERT [dbo].[Products] ON
194 | GO
195 | INSERT [dbo].[Products] ([Id], [ProductName], [IdealProductionUnitsPerMinute]) VALUES (1, N'Product-1', 1)
196 | GO
197 | INSERT [dbo].[Products] ([Id], [ProductName], [IdealProductionUnitsPerMinute]) VALUES (2, N'Product-2', 2)
198 | GO
199 | INSERT [dbo].[Products] ([Id], [ProductName], [IdealProductionUnitsPerMinute]) VALUES (3, N'Product-3', 3)
200 | GO
201 | SET IDENTITY_INSERT [dbo].[Products] OFF
202 | GO
203 | SET IDENTITY_INSERT [dbo].[ShiftPlannedDownTime] ON
204 | GO
205 | INSERT [dbo].[ShiftPlannedDownTime] ([Id], [ShiftId], [PlannedDownTimeReason], [PlannedDownTimeInMinutes], [CreatedTimeStamp]) VALUES (1, 1, N'Breaks', 50, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
206 | GO
207 | INSERT [dbo].[ShiftPlannedDownTime] ([Id], [ShiftId], [PlannedDownTimeReason], [PlannedDownTimeInMinutes], [CreatedTimeStamp]) VALUES (2, 2, N'Breaks', 50, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
208 | GO
209 | INSERT [dbo].[ShiftPlannedDownTime] ([Id], [ShiftId], [PlannedDownTimeReason], [PlannedDownTimeInMinutes], [CreatedTimeStamp]) VALUES (3, 2, N'Other', 15, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
210 | GO
211 | INSERT [dbo].[ShiftPlannedDownTime] ([Id], [ShiftId], [PlannedDownTimeReason], [PlannedDownTimeInMinutes], [CreatedTimeStamp]) VALUES (4, 3, N'Breaks', 50, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
212 | GO
213 | INSERT [dbo].[ShiftPlannedDownTime] ([Id], [ShiftId], [PlannedDownTimeReason], [PlannedDownTimeInMinutes], [CreatedTimeStamp]) VALUES (5, 3, N'Changeover', 10, CAST(N'2022-06-30T00:00:00.000' AS DateTime))
214 | GO
215 | SET IDENTITY_INSERT [dbo].[ShiftPlannedDownTime] OFF
216 | GO
217 | SET IDENTITY_INSERT [dbo].[Shifts] ON
218 | GO
219 | INSERT [dbo].[Shifts] ([Id], [PlantId], [ShiftName], [ShiftStartTime], [ShiftEndTime]) VALUES (1, 1, N'First', CAST(N'00:00:00' AS Time), CAST(N'07:59:59' AS Time))
220 | GO
221 | INSERT [dbo].[Shifts] ([Id], [PlantId], [ShiftName], [ShiftStartTime], [ShiftEndTime]) VALUES (2, 1, N'Second', CAST(N'08:00:00' AS Time), CAST(N'15:59:59' AS Time))
222 | GO
223 | INSERT [dbo].[Shifts] ([Id], [PlantId], [ShiftName], [ShiftStartTime], [ShiftEndTime]) VALUES (3, 1, N'Third', CAST(N'16:00:00' AS Time), CAST(N'23:59:59' AS Time))
224 | GO
225 | INSERT [dbo].[Shifts] ([Id], [PlantId], [ShiftName], [ShiftStartTime], [ShiftEndTime]) VALUES (4, 2, N'First', CAST(N'00:00:00' AS Time), CAST(N'07:59:59' AS Time))
226 | GO
227 | INSERT [dbo].[Shifts] ([Id], [PlantId], [ShiftName], [ShiftStartTime], [ShiftEndTime]) VALUES (5, 2, N'Second', CAST(N'08:00:00' AS Time), CAST(N'15:59:59' AS Time))
228 | GO
229 | INSERT [dbo].[Shifts] ([Id], [PlantId], [ShiftName], [ShiftStartTime], [ShiftEndTime]) VALUES (6, 2, N'Third', CAST(N'16:00:00' AS Time), CAST(N'23:59:59' AS Time))
230 | GO
231 | SET IDENTITY_INSERT [dbo].[Shifts] OFF
232 | GO
233 | ALTER TABLE [dbo].[OEE] ADD DEFAULT (getdate()) FOR [CreatedTimeStamp]
234 | GO
235 |
--------------------------------------------------------------------------------
/3_OEECalculationEngine/synapse/adxLinkedService.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "AzureDataExplorerLinkedService",
3 | "properties": {
4 | "type": "AzureDataExplorer",
5 | "typeProperties": {
6 | "endpoint": "https://iiotmfgdev.westus2.kusto.windows.net",
7 | "database": "mfgdb",
8 | "tenant": "",
9 | "servicePrincipalId": "",
10 | "servicePrincipalKey": {
11 | "type": "SecureString",
12 | "value": ""
13 | }
14 | }
15 | }
16 | }
--------------------------------------------------------------------------------
/3_OEECalculationEngine/synapse/sqlLinkedService.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "AzureSqlDbLinkedService",
3 | "properties": {
4 | "type": "AzureSqlDatabase",
5 | "typeProperties": {
6 | "connectionString": "Data Source=iiotsamplesqlserver.database.windows.net;Initial Catalog=iiotsamplesqldb;User ID=azureuser;Password=;Trusted_Connection=False;Encrypt=True;Connection Timeout=30"
7 | },
8 | "connectVia": {
9 | "referenceName": "AutoResolveIntegrationRuntime",
10 | "type": "IntegrationRuntimeReference"
11 | }
12 | }
13 | }
--------------------------------------------------------------------------------
/4_FactorySupplyChainTwin/README.md:
--------------------------------------------------------------------------------
1 | :warning: In progress
2 |
3 | # Factory and Supply Chain Digital Twin
4 |
5 | The goal of this sample is to accelerate deployment of [Industrial IoT Connectivity Patterns](TODO). There is no one-size-fits-all solution; there are many [considerations](TODO), so please review them before moving your workload to production.
6 |
7 | ## Coming soon...
8 |
9 |
10 | ## Additional Resources
11 |
12 | - [Manufacturing Ontologies](https://github.com/digitaltwinconsortium/ManufacturingOntologies)
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/README.md:
--------------------------------------------------------------------------------
1 | # Exploratory Data Analysis for failure predictions using machine learning
2 |
3 | The goal of this sample is to accelerate deployment of [Industrial IoT Prediction Patterns](https://docs.microsoft.com/en-us/azure/architecture/guide/iiot-patterns/iiot-prediction-patterns). There is no one-size-fits-all solution; there are many considerations, so please review them before moving your workload to production.
4 |
5 | Exploratory Data Analysis (EDA) is the first step before we build any custom models using machine learning. It is a critical and often complex step wherein we normalize and clean the data; understand its distribution, outliers, and correlations; and assess it against various hypotheses and experiments.
6 |
7 | ## Scenario / Hypothesis
8 |
9 | Our scenario is predicting quality-related failures based on machine condition. The telemetry data contains a point-in-time snapshot of all the sensor values; whether those values actually resulted in quality failures is logged in a different system. For this sample we will use:
10 |
11 | 1. Simulated Sensor Data
12 |    - Generated via an IoT Edge module
13 |    - Contains 40+ different sensor values
14 |    - Contains the production batch number
15 | 1. Production Quality Data
16 |    - Contains the production batch number
17 |    - Contains a quality error code for each batch
18 |    - 1 = Meets quality expectations | 0 = Does not meet quality expectations.
19 |
20 | ## High Level Design
21 |
22 | 
23 |
24 | ## Pre-requisites
25 |
26 | - You have the [Connectivity Deployment Sample](../1_Connectivity/README.md) working, or already have your IIoT data in Data Explorer.
27 |
28 |
29 | ## Simulate Additional Sensor Data
30 |
31 | - Add the new [SimulatedManufacturingSensor module](https://github.com/users/jomit/packages/container/package/simulatedmanufacturingsensors) to the IoT Edge device created in the sample above.
32 |
33 | - In Azure Portal select IoT Hub > IoT Edge > [Your Device] > Set Modules
34 |
35 | - Select Add > IoT Edge Module
36 |
37 |
38 |
39 | - Module Name: `SimulatedManufacturingSensors`, Image URI: `ghcr.io/jomit/simulatedmanufacturingsensors:0.0.1-amd64`, and click Add
40 |
41 |
42 |
43 | - Click Next and verify that the `upstream` route value is `FROM /messages/* INTO $upstream`
44 |
45 | - Click Next and Create
46 |
47 | - Wait a few seconds and verify that the module is deployed and sending logs
48 |
49 |
50 |
51 | - Verify the data in Data Explorer using the query in [VerifySimulatedData.kql](VerifySimulatedData.kql)
52 |
53 |
54 |
55 | ## Upload production quality data
56 |
57 | - Open the data lake created earlier in Azure Portal and upload the `batch-quality-data.csv` file to a folder named `qualitydata`
58 |
59 |
60 |
61 |
62 | ## Create Machine Learning Workspace
63 |
64 | *A [Machine Learning workspace](https://docs.microsoft.com/en-us/azure/machine-learning/concept-workspace) provides end-to-end data science lifecycle management services. It also provides a centralized place to collaborate on artifacts for machine learning development and deployment.*
65 |
66 | - Create a new machine learning workspace
67 |
68 | - `az ml workspace create -w iiotml -g iiotsample -l westus2`
69 |
70 | - Create a new compute instance for development. (Compute instances are typically per user so prefix with your name.)
71 |
72 | - `az ml computetarget create computeinstance --name jomitdev --vm-size STANDARD_DS3_V2 -w iiotml -g iiotsample`
73 |
74 | - Go to the Notebooks section in the Machine Learning Studio portal and upload the files from the `notebooks` folder
75 |
76 |
77 |
78 | ## Create Datastore
79 |
80 | - Open [Machine Learning Studio](https://ml.azure.com/) and select the workspace created above.
81 |
82 | - Create a new datastore to connect to the telemetry data lake created earlier.
83 |
84 |
85 |
86 |
87 | ## Create raw Dataset
88 |
89 | - Open and run [1_create_raw_dataset.ipynb](./notebooks/1_create_raw_dataset.ipynb) notebook
90 |
91 |
92 |
93 | ## Perform Feature Engineering
94 |
95 | - Open and run [2_exploratory_analysis_feature_selection.ipynb](./notebooks/2_exploratory_analysis_feature_selection.ipynb) notebook
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 | ## Perform basic Frequency Analysis
106 |
107 | - Open and run [2_frequency_analysis.ipynb](./notebooks/2_frequency_analysis.ipynb) notebook
108 |
109 |
110 |
111 | ## Build Baseline Model(s)
112 |
113 | - Open and run [3_baseline_modeling.ipynb](./notebooks/3_baseline_modeling.ipynb) notebook
114 |
115 |
116 |
117 | ## Align Business and ML Objectives
118 |
119 | For any Machine Learning project to succeed, it's crucial to tie Machine Learning metrics to overall business performance. Here's an example of how you might approach this for quality prediction scenarios:
120 |
121 | 1. Build a baseline of the business metrics that you want to improve using ML. For example:
122 | - Number of quality failures
123 |    - Percentage of scrap
124 | - Additional time spent on quality rework
125 | - Cost of quality failures
126 | - Cost of quality rework
127 | 1. Select machine learning metrics for model performance based on use case / scenario. For example:
128 |    - "Precision" attempts to answer: What proportion of positive identifications were actually correct?
129 |    - "Recall" attempts to answer: What proportion of actual positives were identified correctly?
130 |    - For scenarios where the cost of a wrong prediction is high, choose higher "precision"
131 |    - For scenarios where the cost of missing a detection is high, choose higher "recall"
132 | 1. Perform A/B testing and quantify business metric improvements and cost impact, as shown in the example below:
133 |
134 | | Business Metric | Current | With ML (precision=50%, recall=90%) | Cost Impact |
135 | | --- | --- | --- | --- |
136 | | Number of quality failures per year | 100 | 25 | cost per quality failure - 75% |
137 | | Percentage of scrap | 15% | 9% | cost of scrap - 6% |
138 | | Additional time spent on quality rework | 10% | 2% | cost of rework - 8% |
139 | | ... | ... | ... | ... |
140 | 
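141 | To make the precision/recall trade-off above concrete, here is a minimal, self-contained sketch; the counts are hypothetical, chosen to match the precision=50%, recall=90% scenario in the table:
142 | 
143 | ```python
144 | # Hypothetical confusion-matrix counts for a batch quality classifier;
145 | # a "positive" means the model flagged a batch as a quality failure.
146 | true_positives = 45   # failing batches correctly flagged
147 | false_positives = 45  # good batches incorrectly flagged
148 | false_negatives = 5   # failing batches the model missed
149 | 
150 | precision = true_positives / (true_positives + false_positives)  # 45 / 90 = 0.50
151 | recall = true_positives / (true_positives + false_negatives)     # 45 / 50 = 0.90
152 | print(f"precision={precision:.0%}, recall={recall:.0%}")
153 | ```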
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | # See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.194.0/containers/python-3/.devcontainer/base.Dockerfile
2 |
3 | # [Choice] Python version: 3, 3.9, 3.8, 3.7, 3.6
4 | ARG VARIANT="3.7"
5 | FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
6 |
7 | # Install Docker CE
8 | COPY library-scripts/*.sh /tmp/library-scripts/
9 | RUN \
10 | apt-get update -y \
11 | # Use Docker script from script library to set things up - enable non-root docker, user vscode, using moby
12 | && /bin/bash /tmp/library-scripts/docker-in-docker-debian.sh "true" "automatic" "true" \
13 | # install iotedgehubdev
14 | # && apt-get install -y python3-pip && pip3 install --upgrade pip && pip install iotedgehubdev \
15 | && apt-get install -y python3-pip && pip install iotedgehubdev \
16 | # Clean up
17 | && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts/
18 |
19 | # [Optional] If your pip requirements rarely change, uncomment this section to add them to the image.
20 | # COPY requirements.txt /tmp/pip-tmp/
21 | # RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \
22 | # && rm -rf /tmp/pip-tmp
23 |
24 | # launch docker-ce
25 | ENTRYPOINT [ "/usr/local/share/docker-init.sh" ]
26 | CMD [ "sleep", "infinity" ]
27 |
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.194.0/containers/python-3
3 | {
4 | "name": "Azure IoT Edge Python 3.7",
5 | "build": {
6 | "dockerfile": "Dockerfile",
7 | "args": {
8 | // Update 'VARIANT' to pick a Python version: 3, 3.6, 3.7, 3.8, 3.9
9 | "VARIANT": "3.7"
10 | }
11 | },
12 | "runArgs": ["--init", "--privileged"],
13 | "mounts": [
14 | // Keep command history
15 | "source=ostf-bashhistory,target=/commandhistory,type=volume",
16 | // Use docker-in-docker socket
17 | "source=dind-var-lib-docker,target=/var/lib/docker,type=volume"
18 | ],
19 | "overrideCommand": false,
20 |
21 | // Set *default* container specific settings.json values on container create.
22 | "settings": {
23 | "terminal.integrated.profiles.linux": {
24 | "bash": {
25 | "path": "/bin/bash"
26 | }
27 | },
28 | "terminal.integrated.defaultProfile.linux": "bash",
29 | "python.pythonPath": "/usr/local/bin/python",
30 | "python.languageServer": "Pylance",
31 | "python.linting.enabled": true,
32 | "python.linting.pylintEnabled": true,
33 | "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
34 | "python.formatting.blackPath": "/usr/local/py-utils/bin/black",
35 | "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
36 | "python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
37 | "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
38 | "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
39 | "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
40 | "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
41 | "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
42 | },
43 |
44 | // Add the IDs of extensions you want installed when the container is created.
45 | "extensions": [
46 | "ms-python.python",
47 | "ms-python.vscode-pylance",
48 | "ms-azuretools.vscode-docker",
49 | "ms-vscode.azure-account",
50 | "vsciot-vscode.vscode-iot-device-cube",
51 | "vsciot-vscode.azure-iot-tools"
52 | ],
53 |
54 | // Use 'forwardPorts' to make a list of ports inside the container available locally.
55 | // "forwardPorts": [],
56 |
57 | // Use 'postCreateCommand' to run commands after the container is created.
58 | // "postCreateCommand": "pip3 install --user -r requirements.txt",
59 |
60 | "remoteUser": "vscode"
61 | }
62 |
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/.devcontainer/library-scripts/docker-in-docker-debian.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #-------------------------------------------------------------------------------------------------------------
3 | # Copyright (c) Microsoft Corporation. All rights reserved.
4 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
5 | #-------------------------------------------------------------------------------------------------------------
6 | #
7 | # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md
8 | # Maintainer: The VS Code and Codespaces Teams
9 | #
10 | # Syntax: ./docker-in-docker-debian.sh [enable non-root docker access flag] [non-root user] [use moby]
11 |
12 | ENABLE_NONROOT_DOCKER=${1:-"true"}
13 | USERNAME=${2:-"automatic"}
14 | USE_MOBY=${3:-"true"}
15 | MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
16 |
17 | set -e
18 |
19 | if [ "$(id -u)" -ne 0 ]; then
20 | echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
21 | exit 1
22 | fi
23 |
24 | # Determine the appropriate non-root user
25 | if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
26 | USERNAME=""
27 | POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
28 | for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
29 | if id -u ${CURRENT_USER} > /dev/null 2>&1; then
30 | USERNAME=${CURRENT_USER}
31 | break
32 | fi
33 | done
34 | if [ "${USERNAME}" = "" ]; then
35 | USERNAME=root
36 | fi
37 | elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
38 | USERNAME=root
39 | fi
40 |
41 | # Get central common setting
42 | get_common_setting() {
43 | if [ "${common_settings_file_loaded}" != "true" ]; then
44 | curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
45 | common_settings_file_loaded=true
46 | fi
47 | if [ -f "/tmp/vsdc-settings.env" ]; then
48 | local multi_line=""
49 | if [ "$2" = "true" ]; then multi_line="-z"; fi
50 | local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
51 | if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
52 | fi
53 | echo "$1=${!1}"
54 | }
55 |
56 | # Function to run apt-get if needed
57 | apt_get_update_if_needed()
58 | {
59 | if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
60 | echo "Running apt-get update..."
61 | apt-get update
62 | else
63 | echo "Skipping apt-get update."
64 | fi
65 | }
66 |
67 | # Checks if packages are installed and installs them if not
68 | check_packages() {
69 | if ! dpkg -s "$@" > /dev/null 2>&1; then
70 | apt_get_update_if_needed
71 | apt-get -y install --no-install-recommends "$@"
72 | fi
73 | }
74 |
75 | # Ensure apt is in non-interactive to avoid prompts
76 | export DEBIAN_FRONTEND=noninteractive
77 |
78 | # Install dependencies
79 | check_packages apt-transport-https curl ca-certificates lxc pigz iptables gnupg2 dirmngr
80 |
81 | # Swap to legacy iptables for compatibility
82 | if type iptables-legacy > /dev/null 2>&1; then
83 | update-alternatives --set iptables /usr/sbin/iptables-legacy
84 | update-alternatives --set ip6tables /usr/sbin/ip6tables-legacy
85 | fi
86 |
87 | # Install Docker / Moby CLI if not already installed
88 | architecture="$(dpkg --print-architecture)"
89 | if type docker > /dev/null 2>&1 && type dockerd > /dev/null 2>&1; then
90 | echo "Docker / Moby CLI and Engine already installed."
91 | else
92 | # Source /etc/os-release to get OS info
93 | . /etc/os-release
94 | if [ "${USE_MOBY}" = "true" ]; then
95 | # Import key safely (new 'signed-by' method rather than deprecated apt-key approach) and install
96 | get_common_setting MICROSOFT_GPG_KEYS_URI
97 | curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
98 | echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
99 | apt-get update
100 | apt-get -y install --no-install-recommends moby-cli moby-buildx moby-engine
101 | apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for ${VERSION_CODENAME} ${architecture}. Skipping."
102 | else
103 | # Import key safely (new 'signed-by' method rather than deprecated apt-key approach) and install
104 | curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
105 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
106 | apt-get update
107 | apt-get -y install --no-install-recommends docker-ce-cli docker-ce
108 | fi
109 | fi
110 |
111 | echo "Finished installing docker / moby"
112 |
113 | # Install Docker Compose if not already installed and is on a supported architecture
114 | if type docker-compose > /dev/null 2>&1; then
115 | echo "Docker Compose already installed."
116 | else
117 | target_compose_arch="${architecture}"
118 | if [ "${target_compose_arch}" != "x86_64" ]; then
119 |         # Use pip to get a version that runs on this architecture
120 | if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
121 | apt_get_update_if_needed
122 | apt-get -y install python3-minimal python3-pip libffi-dev python3-venv
123 | fi
124 | export PIPX_HOME=/usr/local/pipx
125 | mkdir -p ${PIPX_HOME}
126 | export PIPX_BIN_DIR=/usr/local/bin
127 | export PYTHONUSERBASE=/tmp/pip-tmp
128 | export PIP_CACHE_DIR=/tmp/pip-tmp/cache
129 | pipx_bin=pipx
130 | if ! type pipx > /dev/null 2>&1; then
131 | pip3 install --disable-pip-version-check --no-warn-script-location --no-cache-dir --user pipx
132 | pipx_bin=/tmp/pip-tmp/bin/pipx
133 | fi
134 | ${pipx_bin} install --system-site-packages --pip-args '--no-cache-dir --force-reinstall' docker-compose
135 | rm -rf /tmp/pip-tmp
136 | else
137 | latest_compose_version=$(basename "$(curl -fsSL -o /dev/null -w "%{url_effective}" https://github.com/docker/compose/releases/latest)")
138 | curl -fsSL "https://github.com/docker/compose/releases/download/${latest_compose_version}/docker-compose-$(uname -s)-${target_compose_arch}" -o /usr/local/bin/docker-compose
139 | chmod +x /usr/local/bin/docker-compose
140 | fi
141 | fi
142 |
143 | # If init file already exists, exit
144 | if [ -f "/usr/local/share/docker-init.sh" ]; then
145 | echo "/usr/local/share/docker-init.sh already exists, so exiting."
146 | exit 0
147 | fi
148 | echo "docker-init doesnt exist..."
149 |
150 | # Add user to the docker group
151 | if [ "${ENABLE_NONROOT_DOCKER}" = "true" ]; then
152 | if ! getent group docker > /dev/null 2>&1; then
153 | groupadd docker
154 | fi
155 |
156 | usermod -aG docker ${USERNAME}
157 | fi
158 |
159 | tee /usr/local/share/docker-init.sh > /dev/null \
160 | << 'EOF'
161 | #!/usr/bin/env bash
162 | #-------------------------------------------------------------------------------------------------------------
163 | # Copyright (c) Microsoft Corporation. All rights reserved.
164 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
165 | #-------------------------------------------------------------------------------------------------------------
166 |
167 | sudoIf()
168 | {
169 | if [ "$(id -u)" -ne 0 ]; then
170 | sudo "$@"
171 | else
172 | "$@"
173 | fi
174 | }
175 |
176 | # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly
177 | # ie: docker kill
178 | sudoIf find /run /var/run -iname 'docker*.pid' -delete || :
179 | sudoIf find /run /var/run -iname 'container*.pid' -delete || :
180 |
181 | set -e
182 |
183 | ## Dind wrapper script from docker team
184 | # Maintained: https://github.com/moby/moby/blob/master/hack/dind
185 |
186 | export container=docker
187 |
188 | if [ -d /sys/kernel/security ] && ! sudoIf mountpoint -q /sys/kernel/security; then
189 | sudoIf mount -t securityfs none /sys/kernel/security || {
190 | echo >&2 'Could not mount /sys/kernel/security.'
191 | echo >&2 'AppArmor detection and --privileged mode might break.'
192 | }
193 | fi
194 |
195 | # Mount /tmp (conditionally)
196 | if ! sudoIf mountpoint -q /tmp; then
197 | sudoIf mount -t tmpfs none /tmp
198 | fi
199 |
200 | # cgroup v2: enable nesting
201 | if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
202 | # move the init process (PID 1) from the root group to the /init group,
203 | # otherwise writing subtree_control fails with EBUSY.
204 | sudoIf mkdir -p /sys/fs/cgroup/init
205 | sudoIf echo 1 > /sys/fs/cgroup/init/cgroup.procs
206 | # enable controllers
207 | sudoIf sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \
208 | > /sys/fs/cgroup/cgroup.subtree_control
209 | fi
210 | ## Dind wrapper over.
211 |
212 | # Handle DNS
213 | set +e
214 | cat /etc/resolv.conf | grep -i 'internal.cloudapp.net'
215 | if [ $? -eq 0 ]
216 | then
217 | echo "Setting dockerd Azure DNS."
218 | CUSTOMDNS="--dns 168.63.129.16"
219 | else
220 | echo "Not setting dockerd DNS manually."
221 | CUSTOMDNS=""
222 | fi
223 | set -e
224 |
225 | # Start docker/moby engine
226 | ( sudoIf dockerd $CUSTOMDNS > /tmp/dockerd.log 2>&1 ) &
227 |
228 | set +e
229 |
230 | # Execute whatever commands were passed in (if any). This allows us
231 | # to set this script to ENTRYPOINT while still executing the default CMD.
232 | exec "$@"
233 | EOF
234 |
235 | chmod +x /usr/local/share/docker-init.sh
236 | chown ${USERNAME}:root /usr/local/share/docker-init.sh
237 |
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/.gitignore:
--------------------------------------------------------------------------------
1 | config/
2 | .env
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "SimulatedManufacturingSensors Remote Debug (Python)",
6 | "type": "python",
7 | "request": "attach",
8 | "port": 5678,
9 | "host": "localhost",
10 | "logToFile": true,
11 | "redirectOutput": true,
12 | "pathMappings": [
13 | {
14 | "localRoot": "${workspaceFolder}/modules/SimulatedManufacturingSensors",
15 | "remoteRoot": "/app"
16 | }
17 | ],
18 | "windows": {
19 | "pathMappings": [
20 | {
21 | "localRoot": "${workspaceFolder}\\modules\\SimulatedManufacturingSensors",
22 | "remoteRoot": "/app"
23 | }
24 | ]
25 | }
26 | },
27 | {
28 | "name": "SimulatedManufacturingSensors Local Debug (Python)",
29 | "type": "python",
30 | "request": "launch",
31 | "program": "${workspaceFolder}/modules/SimulatedManufacturingSensors/main.py",
32 | "console": "integratedTerminal",
33 | "env": {
34 | "EdgeHubConnectionString": "${config:azure-iot-edge.EdgeHubConnectionString}",
35 | "EdgeModuleCACertificateFile": "${config:azure-iot-edge.EdgeModuleCACertificateFile}"
36 | },
37 | "windows": {
38 | "program": "${workspaceFolder}\\modules\\SimulatedManufacturingSensors\\main.py"
39 | }
40 | },
41 | {
42 | "name": "SampleModule Remote Debug (Python)",
43 | "type": "python",
44 | "request": "attach",
45 | "port": 5678,
46 | "host": "localhost",
47 | "logToFile": true,
48 | "redirectOutput": true,
49 | "pathMappings": [
50 | {
51 | "localRoot": "${workspaceFolder}/modules/SampleModule",
52 | "remoteRoot": "/app"
53 | }
54 | ],
55 | "windows": {
56 | "pathMappings": [
57 | {
58 | "localRoot": "${workspaceFolder}\\modules\\SampleModule",
59 | "remoteRoot": "/app"
60 | }
61 | ]
62 | }
63 | },
64 | {
65 | "name": "SampleModule Local Debug (Python)",
66 | "type": "python",
67 | "request": "launch",
68 | "program": "${workspaceFolder}/modules/SampleModule/main.py",
69 | "console": "integratedTerminal",
70 | "env": {
71 | "EdgeHubConnectionString": "${config:azure-iot-edge.EdgeHubConnectionString}",
72 | "EdgeModuleCACertificateFile": "${config:azure-iot-edge.EdgeModuleCACertificateFile}"
73 | },
74 | "windows": {
75 | "program": "${workspaceFolder}\\modules\\SampleModule\\main.py"
76 | }
77 | }
78 | ]
79 | }
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/deployment.debug.template.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema-template": "4.0.0",
3 | "modulesContent": {
4 | "$edgeAgent": {
5 | "properties.desired": {
6 | "schemaVersion": "1.1",
7 | "runtime": {
8 | "type": "docker",
9 | "settings": {
10 | "minDockerVersion": "v1.25",
11 | "loggingOptions": "",
12 | "registryCredentials": {
13 | "ghcr": {
14 | "username": "$CONTAINER_REGISTRY_USERNAME_ghcr",
15 | "password": "$CONTAINER_REGISTRY_PASSWORD_ghcr",
16 | "address": "ghcr.io"
17 | }
18 | }
19 | }
20 | },
21 | "systemModules": {
22 | "edgeAgent": {
23 | "type": "docker",
24 | "settings": {
25 | "image": "mcr.microsoft.com/azureiotedge-agent:1.2",
26 | "createOptions": {}
27 | }
28 | },
29 | "edgeHub": {
30 | "type": "docker",
31 | "status": "running",
32 | "restartPolicy": "always",
33 | "settings": {
34 | "image": "mcr.microsoft.com/azureiotedge-hub:1.2",
35 | "createOptions": {
36 | "HostConfig": {
37 | "PortBindings": {
38 | "5671/tcp": [
39 | {
40 | "HostPort": "5671"
41 | }
42 | ],
43 | "8883/tcp": [
44 | {
45 | "HostPort": "8883"
46 | }
47 | ],
48 | "443/tcp": [
49 | {
50 | "HostPort": "443"
51 | }
52 | ]
53 | }
54 | }
55 | },
56 | "env": {
57 | "UpstreamProtocol": {
58 | "value": "AmqpWs"
59 | }
60 | }
61 | }
62 | }
63 | },
64 | "modules": {
65 | "SimulatedManufacturingSensors": {
66 | "version": "1.0",
67 | "type": "docker",
68 | "status": "running",
69 | "restartPolicy": "always",
70 | "settings": {
71 | "image": "${MODULES.SimulatedManufacturingSensors.debug}",
72 | "createOptions": {}
73 | }
74 | }
75 | }
76 | }
77 | },
78 | "$edgeHub": {
79 | "properties.desired": {
80 | "schemaVersion": "1.2",
81 | "routes": {
82 | "upstream": "FROM /messages/* INTO $upstream"
83 | },
84 | "storeAndForwardConfiguration": {
85 | "timeToLiveSecs": 7200
86 | }
87 | }
88 | }
89 | }
90 | }
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/deployment.template.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema-template": "4.0.0",
3 | "modulesContent": {
4 | "$edgeAgent": {
5 | "properties.desired": {
6 | "schemaVersion": "1.1",
7 | "runtime": {
8 | "type": "docker",
9 | "settings": {
10 | "minDockerVersion": "v1.25",
11 | "loggingOptions": "",
12 | "registryCredentials": {
13 | "ghcr": {
14 | "username": "$CONTAINER_REGISTRY_USERNAME_ghcr",
15 | "password": "$CONTAINER_REGISTRY_PASSWORD_ghcr",
16 | "address": "ghcr.io"
17 | }
18 | }
19 | }
20 | },
21 | "systemModules": {
22 | "edgeAgent": {
23 | "type": "docker",
24 | "settings": {
25 | "image": "mcr.microsoft.com/azureiotedge-agent:1.2",
26 | "createOptions": {}
27 | }
28 | },
29 | "edgeHub": {
30 | "type": "docker",
31 | "status": "running",
32 | "restartPolicy": "always",
33 | "settings": {
34 | "image": "mcr.microsoft.com/azureiotedge-hub:1.2",
35 | "createOptions": {
36 | "HostConfig": {
37 | "PortBindings": {
38 | "5671/tcp": [
39 | {
40 | "HostPort": "5671"
41 | }
42 | ],
43 | "8883/tcp": [
44 | {
45 | "HostPort": "8883"
46 | }
47 | ],
48 | "443/tcp": [
49 | {
50 | "HostPort": "443"
51 | }
52 | ]
53 | }
54 | }
55 | },
56 | "env": {
57 | "UpstreamProtocol": {
58 | "value": "AmqpWs"
59 | }
60 | }
61 | }
62 | }
63 | },
64 | "modules": {
65 | "SimulatedManufacturingSensors": {
66 | "version": "1.0",
67 | "type": "docker",
68 | "status": "running",
69 | "restartPolicy": "always",
70 | "settings": {
71 | "image": "${MODULES.SimulatedManufacturingSensors}",
72 | "createOptions": {}
73 | }
74 | }
75 | }
76 | }
77 | },
78 | "$edgeHub": {
79 | "properties.desired": {
80 | "schemaVersion": "1.2",
81 | "routes": {
82 | "upstream": "FROM /messages/* INTO $upstream"
83 | },
84 | "storeAndForwardConfiguration": {
85 | "timeToLiveSecs": 7200
86 | }
87 | }
88 | }
89 | }
90 | }
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/Dockerfile.amd64:
--------------------------------------------------------------------------------
1 | FROM amd64/python:3.7-slim-buster
2 |
3 | WORKDIR /app
4 |
5 | COPY requirements.txt ./
6 | RUN pip install -r requirements.txt
7 |
8 | COPY . .
9 |
10 | CMD [ "python3", "-u", "./main.py" ]
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/Dockerfile.amd64.debug:
--------------------------------------------------------------------------------
1 | FROM amd64/python:3.7-slim-buster
2 |
3 | WORKDIR /app
4 |
5 | RUN pip install ptvsd==4.1.3
6 | COPY requirements.txt ./
7 | RUN pip install -r requirements.txt
8 |
9 | COPY . .
10 |
11 | CMD [ "python3", "-u", "./main.py" ]
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/Dockerfile.arm32v7:
--------------------------------------------------------------------------------
1 | FROM arm32v7/python:3.7-slim-buster
2 |
3 | WORKDIR /app
4 |
5 | COPY requirements.txt ./
6 | RUN pip install -r requirements.txt
7 |
8 | COPY . .
9 |
10 | CMD [ "python3", "-u", "./main.py" ]
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/Dockerfile.arm32v7.debug:
--------------------------------------------------------------------------------
1 | FROM arm32v7/python:3.7-slim-buster
2 |
3 | WORKDIR /app
4 |
5 | RUN pip install ptvsd==4.1.3
6 | COPY requirements.txt ./
7 | RUN pip install -r requirements.txt
8 |
9 | COPY . .
10 |
11 | CMD [ "python3", "-u", "./main.py" ]
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/Dockerfile.arm64v8:
--------------------------------------------------------------------------------
1 | FROM arm64v8/python:3.7-slim-buster
2 |
3 | WORKDIR /app
4 |
5 | COPY requirements.txt ./
6 | RUN pip install -r requirements.txt
7 |
8 | COPY . .
9 |
10 | CMD [ "python3", "-u", "./main.py" ]
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/Dockerfile.arm64v8.debug:
--------------------------------------------------------------------------------
1 | FROM arm64v8/python:3.7-slim-buster
2 |
3 | WORKDIR /app
4 |
5 | RUN pip install ptvsd==4.1.3
6 | COPY requirements.txt ./
7 | RUN pip install -r requirements.txt
8 |
9 | COPY . .
10 |
11 | CMD [ "python3", "-u", "./main.py" ]
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/main.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft. All rights reserved.
2 | # Licensed under the MIT license. See LICENSE file in the project root for
3 | # full license information.
4 |
5 | import asyncio
6 | import sys
7 | import signal
8 | import threading
9 | from azure.iot.device.aio import IoTHubModuleClient
10 | import pandas as pd
11 | from datetime import datetime
12 |
13 | # Event indicating client stop
14 | stop_event = threading.Event()
15 |
16 | def create_client():
17 | client = IoTHubModuleClient.create_from_edge_environment()
18 |
19 | # # Define function for handling received messages
20 | # async def receive_message_handler(message):
21 | # # NOTE: This function only handles messages sent to "input1".
22 | # # Messages sent to other inputs, or to the default, will be discarded
23 | # if message.input_name == "input1":
24 | # print("the data in the message received on input1 was ")
25 | # print(message.data)
26 | # print("custom properties are")
27 | # print(message.custom_properties)
28 | # print("forwarding mesage to output1")
29 | # await client.send_message_to_output(message, "output1")
30 |
31 | # try:
32 | # # Set handler on the client
33 | # client.on_message_received = receive_message_handler
34 | # except:
35 | # # Cleanup if failure occurs
36 | # client.shutdown()
37 | # raise
38 |
39 | return client
40 |
41 |
42 | async def run_sample(client):
43 | df = pd.read_csv('simulateddata.csv')
44 | batchId = 1
45 | while not stop_event.is_set():  # run until Edge signals termination via SIGTERM
46 | for index, row in df.iterrows():
47 | row['BatchNumber'] = batchId
48 | row['SourceTimestamp'] = datetime.now()
49 | message = row.to_json()
50 | print(message)
51 | await client.send_message_to_output(message, "output1")
52 | batchId+=1
53 | await asyncio.sleep(2)
54 |
55 | def main():
56 | if sys.version_info < (3, 5, 3):  # compare as a tuple; string comparison of sys.version breaks on Python 3.10+
57 | raise Exception( "The sample requires python 3.5.3+. Current version of Python: %s" % sys.version )
58 | print("IoT Hub Client for Python")
59 |
60 | # NOTE: The client connects implicitly the first time it is used
61 | client = create_client()
62 |
63 | # Define a handler to clean up when the module is terminated by Edge
64 | def module_termination_handler(signal, frame):
65 | print ("IoTHubClient sample stopped by Edge")
66 | stop_event.set()
67 |
68 | # Set the Edge termination handler
69 | signal.signal(signal.SIGTERM, module_termination_handler)
70 |
71 | # Run the sample
72 | loop = asyncio.get_event_loop()
73 | try:
74 | loop.run_until_complete(run_sample(client))
75 | except Exception as e:
76 | print("Unexpected error %s " % e)
77 | raise
78 | finally:
79 | print("Shutting down IoT Hub Client...")
80 | loop.run_until_complete(client.shutdown())
81 | loop.close()
82 |
83 |
84 | if __name__ == "__main__":
85 | main()
86 |
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/module.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema-version": "0.0.1",
3 | "description": "",
4 | "image": {
5 | "repository": "ghcr.io/jomit/simulatedmanufacturingsensors",
6 | "tag": {
7 | "version": "0.0.1",
8 | "platforms": {
9 | "amd64": "./Dockerfile.amd64",
10 | "amd64.debug": "./Dockerfile.amd64.debug",
11 | "arm32v7": "./Dockerfile.arm32v7",
12 | "arm32v7.debug": "./Dockerfile.arm32v7.debug",
13 | "arm64v8": "./Dockerfile.arm64v8",
14 | "arm64v8.debug": "./Dockerfile.arm64v8.debug"
15 | }
16 | },
17 | "buildOptions": [],
18 | "contextPath": "./"
19 | },
20 | "language": "python"
21 | }
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/SimulatedIndustrialSensors/modules/SimulatedManufacturingSensors/requirements.txt:
--------------------------------------------------------------------------------
1 | azure-iot-device~=2.7.0
2 | pandas
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/VerifySimulatedData.kql:
--------------------------------------------------------------------------------
1 | opcua_raw
2 | | where payload contains "BatchNumber"
3 | | project
4 | BatchNumber = payload.BatchNumber,
5 | SourceTimestamp = unixtime_milliseconds_todatetime(todouble(payload.SourceTimestamp)),
6 | Sensor1 = payload.S1, Sensor2 = payload.S2, Sensor3 = payload.S3, Sensor5 = payload.S5
7 | | order by SourceTimestamp desc
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/moduleContent.json:
--------------------------------------------------------------------------------
1 | {
2 | "modulesContent": {
3 | "$edgeAgent": {
4 | "properties.desired": {
5 | "modules": {
6 | "OPCPublisher": {
7 | "settings": {
8 | "image": "mcr.microsoft.com/iotedge/opc-publisher:2.8.0",
9 | "createOptions": "{\"Hostname\":\"publisher\",\"Cmd\":[\"PkiRootPath=/opcdata/pki\",\"--lf=/opcdata/publisher.log\",\"--pf=/opcdata/opcconfig.json\",\"--aa\",\"--di=60\",\"--me=Json\",\"--mm=PubSub\"],\"HostConfig\":{\"Binds\":[\"/home/iotedge-user/opcconfig:/opcdata\"]}}"
10 | },
11 | "type": "docker",
12 | "version": "1.0",
13 | "status": "running",
14 | "restartPolicy": "always"
15 | },
16 | "SimulatedManufacturingSensors": {
17 | "settings": {
18 | "image": "ghcr.io/jomit/simulatedmanufacturingsensors:0.0.1-amd64",
19 | "createOptions": ""
20 | },
21 | "type": "docker",
22 | "status": "running",
23 | "restartPolicy": "always",
24 | "version": "1.0"
25 | }
26 | },
27 | "runtime": {
28 | "settings": {
29 | "minDockerVersion": "v1.25"
30 | },
31 | "type": "docker"
32 | },
33 | "schemaVersion": "1.1",
34 | "systemModules": {
35 | "edgeAgent": {
36 | "settings": {
37 | "image": "mcr.microsoft.com/azureiotedge-agent:1.1",
38 | "createOptions": "{}"
39 | },
40 | "type": "docker",
41 | "env": {
42 | "UpstreamProtocol": {
43 | "value": "AmqpWs"
44 | }
45 | }
46 | },
47 | "edgeHub": {
48 | "settings": {
49 | "image": "mcr.microsoft.com/azureiotedge-hub:1.1",
50 | "createOptions": "{\"HostConfig\":{\"PortBindings\":{\"5671/tcp\":[{\"HostPort\":\"5671\"}],\"8883/tcp\":[{\"HostPort\":\"8883\"}],\"443/tcp\":[{\"HostPort\":\"443\"}]}}}"
51 | },
52 | "type": "docker",
53 | "env": {
54 | "UpstreamProtocol": {
55 | "value": "AmqpWs"
56 | }
57 | },
58 | "status": "running",
59 | "restartPolicy": "always"
60 | }
61 | }
62 | }
63 | },
64 | "$edgeHub": {
65 | "properties.desired": {
66 | "routes": {
67 | "upstream": "FROM /messages/* INTO $upstream"
68 | },
69 | "schemaVersion": "1.1",
70 | "storeAndForwardConfiguration": {
71 | "timeToLiveSecs": 7200
72 | }
73 | }
74 | }
75 | }
76 | }
--------------------------------------------------------------------------------
/5_ExplorationDataAnalysis/notebooks/3_baseline_modeling-automl.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "nteract": {
7 | "transient": {
8 | "deleting": false
9 | }
10 | }
11 | },
12 | "source": [
13 | "# Load Dataset with inital features"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": null,
19 | "metadata": {},
20 | "outputs": [],
21 | "source": [
22 | "import pandas as pd\n",
23 | "import azureml.core\n",
24 | "from azureml.core import Experiment, Dataset, Datastore, Workspace\n",
25 | "from azureml.train.automl import AutoMLConfig\n",
26 | "from azureml.core.compute import ComputeTarget, AmlCompute\n",
27 | "import logging\n",
28 | "\n",
29 | "ws = Workspace.from_config()\n",
30 | "\n",
31 | "qualitydataset = Dataset.get_by_name(workspace=ws,name=\"iiot_quality_featured_data\")\n",
32 | "qualitydf = qualitydataset.to_pandas_dataframe()\n",
33 | "print(\"Rows => {0}\".format(qualitydf.shape[0]))\n",
34 | "print(\"Columns => {0}\".format(qualitydf.shape[1]))\n",
35 | "qualitydf.head()"
36 | ]
37 | },
38 | {
39 | "cell_type": "markdown",
40 | "metadata": {
41 | "nteract": {
42 | "transient": {
43 | "deleting": false
44 | }
45 | }
46 | },
47 | "source": [
48 | "# Configure Experiment"
49 | ]
50 | },
51 | {
52 | "cell_type": "code",
53 | "execution_count": null,
54 | "metadata": {
55 | "jupyter": {
56 | "outputs_hidden": false,
57 | "source_hidden": false
58 | },
59 | "nteract": {
60 | "transient": {
61 | "deleting": false
62 | }
63 | }
64 | },
65 | "outputs": [],
66 | "source": [
67 | "# Configure Cluster\n",
68 | "cluster_name = \"cpu-cluster\"\n",
69 | "compute_target = ComputeTarget(workspace=ws, name=cluster_name)\n",
70 | "\n",
71 | "# Configure Automated ML settings\n",
72 | "automl_settings = {\n",
73 | " \"n_cross_validations\": 3,\n",
74 | " \"primary_metric\": \"average_precision_score_weighted\",\n",
75 | " \"enable_early_stopping\": True,\n",
76 | "    \"max_concurrent_iterations\": 2, # This is a limit for testing purposes; increase it to match your cluster size\n",
77 | "    \"experiment_timeout_hours\": 0.25, # This is a time limit for testing purposes; remove it for real use cases, as it will drastically limit the ability to find the best model possible\n",
78 | " \"verbosity\": logging.INFO,\n",
79 | "}\n",
80 | "\n",
81 | "label_column_name = \"Quality\"\n",
82 | "\n",
83 | "automl_config = AutoMLConfig(\n",
84 | " task=\"classification\",\n",
85 | " debug_log=\"automl_errors.log\",\n",
86 | " compute_target=compute_target,\n",
87 | " training_data=qualitydataset,\n",
88 | " label_column_name=label_column_name,\n",
89 | " **automl_settings,\n",
90 | ")"
91 | ]
92 | },
93 | {
94 | "cell_type": "markdown",
95 | "metadata": {
96 | "nteract": {
97 | "transient": {
98 | "deleting": false
99 | }
100 | }
101 | },
102 | "source": [
103 | "# Run Experiment"
104 | ]
105 | },
106 | {
107 | "cell_type": "code",
108 | "execution_count": null,
109 | "metadata": {
110 | "jupyter": {
111 | "outputs_hidden": false,
112 | "source_hidden": false
113 | },
114 | "nteract": {
115 | "transient": {
116 | "deleting": false
117 | }
118 | }
119 | },
120 | "outputs": [],
121 | "source": [
122 | "experiment_name = \"Baseline-Modeling-Automl\"\n",
123 | "experiment = Experiment(ws, experiment_name)\n",
124 | "\n",
125 | "remote_run = experiment.submit(automl_config, show_output=False)"
126 | ]
127 | }
128 | ],
129 | "metadata": {
130 | "kernel_info": {
131 | "name": "python38-azureml"
132 | },
133 | "kernelspec": {
134 | "display_name": "Python 3.8 - AzureML",
135 | "language": "python",
136 | "name": "python38-azureml"
137 | },
138 | "language_info": {
139 | "codemirror_mode": {
140 | "name": "ipython",
141 | "version": 3
142 | },
143 | "file_extension": ".py",
144 | "mimetype": "text/x-python",
145 | "name": "python",
146 | "nbconvert_exporter": "python",
147 | "pygments_lexer": "ipython3",
148 | "version": "3.8.5"
149 | },
150 | "nteract": {
151 | "version": "nteract-front-end@1.0.0"
152 | }
153 | },
154 | "nbformat": 4,
155 | "nbformat_minor": 0
156 | }
157 |
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/OperationalizeBatch.md:
--------------------------------------------------------------------------------
1 | # Operationalizing machine learning models with batch pipelines
2 |
3 | The goal of this sample is to accelerate deployment of [Industrial IoT Prediction Patterns](https://docs.microsoft.com/en-us/azure/architecture/guide/iiot-patterns/iiot-prediction-patterns). There is no one-size-fits-all solution; there are many considerations, so please review them before moving your workload to production.
4 |
5 | In the previous step we used some of the [Exploratory Data Analysis](../5_ExplorationDataAnalysis/README.md) techniques to select initial features for the model and performed baseline modeling to understand which algorithms may work well with our dataset. For this sample we will select an algorithm and build an end-to-end machine learning pipeline to a) train and register a new model and b) run the model on new data and store the predictions for reporting.
6 |
7 | ## High-Level Design
8 |
9 | 
10 |
11 | ## Prerequisites
12 |
13 | - You have [Exploratory Data Analysis](../5_ExplorationDataAnalysis/README.md) working.
14 |
15 | - Create an autoscaled compute cluster with a system-assigned identity
16 |
17 | - `az ml computetarget create amlcompute --name "cpu-cluster" --max-nodes 2 --vm-size "STANDARD_DS3_V2" --assign-identity '[system]' -w iiotml -g iiotsample`
18 |
19 | - Assign database permissions in Data Explorer to the 'cpu-cluster' managed identity created above (see the sketch below)
20 |
21 |
22 |
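The role assignment can also be scripted. Below is a minimal sketch using the `azure-kusto-data` package; the cluster URI and database name match the values used in `build-datasets.py`, while `<principal-object-id>` and `<tenant-id>` are placeholders you must fill in with your managed identity's values.

```python
# Sketch: grant the 'cpu-cluster' managed identity read access to the database.
from azure.kusto.data import KustoClient, KustoConnectionStringBuilder

cluster = "https://iiotmfgdev.westus2.kusto.windows.net"
kcsb = KustoConnectionStringBuilder.with_az_cli_authentication(cluster)
client = KustoClient(kcsb)

# 'viewers' is sufficient for the read-only queries the pipelines run.
client.execute_mgmt(
    "mfgdb",
    ".add database mfgdb viewers ('aadapp=<principal-object-id>;<tenant-id>')",
)
```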
23 | - Go to the `Notebooks` section in the Machine Learning Studio portal and upload the files from the `ml-pipelines` folder
24 |
25 | ## Building Model Training Pipeline
26 |
27 | - Open and run the [01-build-retrain-pipeline.ipynb](./ml-pipelines/01-build-retrain-pipeline.ipynb) notebook to create a machine learning pipeline that (see the re-run sketch after the screenshots below):
28 | 1. Builds and registers train and test datasets.
29 | 2. Builds and registers a new model based on
30 | the features provided as a parameter.
31 |
32 |
33 |
34 |
35 |
36 |
37 |
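Once published, the retraining pipeline can be re-submitted on demand with different parameters. A minimal sketch, assuming the pipeline was published as `iiot-quality-training` by the notebook above (the feature list here is an illustrative subset):

```python
# Sketch: re-run the published retraining pipeline with a different feature set.
from azureml.core import Experiment, Workspace
from azureml.pipeline.core import PublishedPipeline

ws = Workspace.from_config()
published = next(p for p in PublishedPipeline.list(ws)
                 if p.name == "iiot-quality-training")

run = Experiment(ws, "Model-Training-Pipeline").submit(
    published,
    pipeline_parameters={
        "modelname": "iiot-quality-lgbm",
        "selectedfeatures": "S16,S20,S19,S18,S29",  # illustrative subset
    },
)
run.wait_for_completion(show_output=True)
```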
38 | ## Building Model Prediction Pipeline
39 |
40 | - Create a folder named `predictionresults` in the Data Lake container associated with the Datastore
41 |
42 | - Open and run the [02-build-prediction-pipeline.ipynb](./ml-pipelines/02-build-prediction-pipeline.ipynb) notebook to create a prediction pipeline that (see the trigger sketch after the screenshots below):
43 | 1. Gets the registered model.
44 | 1. Gets the latest sensor data from Data Explorer.
45 | 1. Runs the model and saves the prediction results to the Data Lake.
46 |
47 |
48 |
49 |
50 |
51 |
52 |
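The published prediction pipeline can also be triggered over REST, which is handy for external schedulers. A minimal sketch, assuming the endpoint URL printed by the publish step above and the same experiment name used in the notebook:

```python
# Sketch: trigger the published prediction pipeline over REST.
import requests
from azureml.core.authentication import InteractiveLoginAuthentication

endpoint = "<published-pipeline-endpoint-url>"  # from publishedPipeline.endpoint
headers = InteractiveLoginAuthentication().get_authentication_header()

response = requests.post(
    endpoint,
    headers=headers,
    json={
        "ExperimentName": "Model-Scoring-Pipeline",
        "ParameterAssignments": {"resultfilename": "tmpresults.csv"},
    },
)
response.raise_for_status()
print("Submitted run:", response.json().get("Id"))
```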
53 | ## Integrating Model Prediction with Synapse Pipelines
54 |
55 | - Create a [Single SQL Database](https://docs.microsoft.com/en-us/azure/azure-sql/database/single-database-create-quickstart?view=azuresql&tabs=azure-portal)
56 |
57 | - Open the `Networking` tab and make sure `Allow Azure services and resources to access this server` is checked
58 |
59 | - Create a [Synapse Workspace](https://docs.microsoft.com/en-us/azure/synapse-analytics/quickstart-create-workspace) with default settings.
60 |
61 | - Create three [Linked Services](https://docs.microsoft.com/en-us/azure/data-factory/concepts-linked-services?tabs=data-factory) in the Synapse Workspace, connected to:
62 |
63 |
64 |
65 | 1. The `Data Lake` that the machine learning prediction pipeline uses to store the results.
66 | 1. The `SQL Database` created above.
67 | 1. The `Machine Learning workspace` used above.
68 |
69 | - Create a new pipeline from [./synapse-pipelines/QualityPrediction_Pipeline_Template.zip](./synapse-pipelines/QualityPrediction_Pipeline_Template.zip) and select the linked services created above.
70 |
71 |
72 |
73 |
74 |
75 | - Trigger the pipeline manually and verify that the prediction results are stored in the Data Lake and added to the SQL Database (a verification sketch follows the screenshots below).
76 |
77 |
78 |
79 |
80 |
81 |
82 |
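To spot-check the output without opening the portal, the results file can be read back through the registered datastore. A minimal sketch; the datastore, folder, and file names match the defaults used in `predict.py`:

```python
# Sketch: read the prediction results CSV back from the Data Lake.
from azureml.core import Dataset, Datastore, Workspace

ws = Workspace.from_config()
store = Datastore.get(ws, "iiotmfgdatalakestore")

results = Dataset.Tabular.from_delimited_files(
    path=[(store, "predictionresults/tmpresults.csv")]
).to_pandas_dataframe()

print(results[["BatchNumber", "Prediction"]].head())
```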
83 | - Optionally, you can set up custom triggers to generate predictions at a regular interval, and view the prediction results directly from the Data Lake file using Spark pools and notebooks, as shown [here](./synapse-pipelines/Read%20Quality%20Prediction%20CSV.ipynb)
84 |
85 | ## Reporting Prediction Results
86 |
87 | - Open [./powerbi/PredictionResults.pbix](./powerbi/PredictionResults.pbix) file and connect to the SQL Database.
88 |
89 | *Compare Prediction vs. Actual Quality Results*
90 |
91 |
92 |
93 | *See sensor values for each batch along with predictions*
94 |
95 |
96 |
97 | *Compare a specific sensor value against all Good vs. NotGood quality results*
98 |
99 |
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/OperationalizeEdge.md:
--------------------------------------------------------------------------------
1 | :warning: In progress
2 |
3 | # Operationalizing machine learning models on the edge
4 |
5 | The goal of this sample is to accelerate deployment of [Industrial IoT Prediction Patterns](https://docs.microsoft.com/en-us/azure/architecture/guide/iiot-patterns/iiot-prediction-patterns). There is no one-size-fits-all solution; there are many considerations, so please review them before moving your workload to production.
6 |
7 | In the previous step we used some of the [Exploratory Data Analysis](../5_ExplorationDataAnalysis/README.md) techniques to select initial features for the model and performed baseline modeling to understand which algorithms may work well with our dataset. For this sample we will
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/OperationalizeNearRealTime.md:
--------------------------------------------------------------------------------
1 | :warning: In progress
2 |
3 | # Operationalizing machine learning models with near real-time APIs & dashboards
4 |
5 | The goal of this sample is to accelerate deployment of [Industrial IoT Prediction Patterns](https://docs.microsoft.com/en-us/azure/architecture/guide/iiot-patterns/iiot-prediction-patterns). There is no one-size-fits-all solution; there are many considerations, so please review them before moving your workload to production.
6 |
7 | In the previous step we used some of the [Exploratory Data Analysis](../5_ExplorationDataAnalysis/README.md) techniques to select initial features for the model and performed baseline modeling to understand which algorithms may work well with our dataset. For this sample we will
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/README.md:
--------------------------------------------------------------------------------
1 | # Operationalizing machine learning based prediction models
2 |
3 | The goal of these samples is to accelerate deployment of [Industrial IoT Prediction Patterns](https://docs.microsoft.com/en-us/azure/architecture/guide/iiot-patterns/iiot-prediction-patterns). There is no one-size-fits-all solution; there are many considerations, so please review them before moving your workload to production.
4 |
5 |
6 |
7 | 1. [Operationalizing machine learning models with batch pipelines](./OperationalizeBatch.md)
8 | 1. Operationalizing machine learning models with near real-time APIs and dashboards (*In Progress*)
9 | 1. Operationalizing machine learning models on the edge (*In Progress*)
10 |
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/ml-pipelines/01-build-retrain-pipeline.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "nteract": {
7 | "transient": {
8 | "deleting": false
9 | }
10 | }
11 | },
12 | "source": [
13 | "# Get Azure ML Workspace"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": 1,
19 | "metadata": {
20 | "gather": {
21 | "logged": 1643075499773
22 | },
23 | "jupyter": {
24 | "outputs_hidden": false,
25 | "source_hidden": false
26 | },
27 | "nteract": {
28 | "transient": {
29 | "deleting": false
30 | }
31 | }
32 | },
33 | "outputs": [
34 | {
35 | "name": "stdout",
36 | "output_type": "stream",
37 | "text": [
38 | "Workspace Name: iiotml\n",
39 | "Azure Region: westus2\n",
40 | "Resource Group: iiotsample\n"
41 | ]
42 | }
43 | ],
44 | "source": [
45 | "from azureml.core import Workspace, Environment, Experiment\n",
46 | "ws = Workspace.from_config()\n",
47 | "\n",
48 | "print('Workspace Name: ' + ws.name, \n",
49 | " 'Azure Region: ' + ws.location, \n",
50 | " 'Resource Group: ' + ws.resource_group, \n",
51 | " sep = '\\n')"
52 | ]
53 | },
54 | {
55 | "cell_type": "markdown",
56 | "metadata": {
57 | "nteract": {
58 | "transient": {
59 | "deleting": false
60 | }
61 | }
62 | },
63 | "source": [
64 | "# Create Conda Environment with Dependencies"
65 | ]
66 | },
67 | {
68 | "cell_type": "code",
69 | "execution_count": 2,
70 | "metadata": {
71 | "gather": {
72 | "logged": 1643075502677
73 | },
74 | "jupyter": {
75 | "outputs_hidden": false,
76 | "source_hidden": false
77 | },
78 | "nteract": {
79 | "transient": {
80 | "deleting": false
81 | }
82 | }
83 | },
84 | "outputs": [],
85 | "source": [
86 | "from azureml.core import Environment\n",
87 | "from azureml.core.conda_dependencies import CondaDependencies\n",
88 | "\n",
89 | "train_env = Environment(name=\"train-env\")\n",
90 | "train_conda_deps = CondaDependencies.create(pip_packages=['pandas','azureml-core','azureml-dataset-runtime[fuse]','numpy','scikit-learn','azure-kusto-data[pandas]','lightgbm'])\n",
91 | "train_env.python.conda_dependencies = train_conda_deps"
92 | ]
93 | },
94 | {
95 | "cell_type": "markdown",
96 | "metadata": {
97 | "nteract": {
98 | "transient": {
99 | "deleting": false
100 | }
101 | }
102 | },
103 | "source": [
104 | "# Create Retraining Pipeline"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": 3,
110 | "metadata": {
111 | "gather": {
112 | "logged": 1643075939831
113 | },
114 | "jupyter": {
115 | "outputs_hidden": false,
116 | "source_hidden": false
117 | },
118 | "nteract": {
119 | "transient": {
120 | "deleting": false
121 | }
122 | }
123 | },
124 | "outputs": [],
125 | "source": [
126 | "from azureml.core.compute import AmlCompute\n",
127 | "from azureml.pipeline.core import Pipeline, PipelineParameter\n",
128 | "from azureml.pipeline.steps import PythonScriptStep\n",
129 | "from azureml.core.runconfig import RunConfiguration\n",
130 | "\n",
131 | "compute = AmlCompute(ws, \"cpu-cluster\")\n",
132 | "\n",
133 | "retrainRunConfig = RunConfiguration()\n",
134 | "retrainRunConfig.target = compute\n",
135 | "retrainRunConfig.environment = train_env\n",
136 | "\n",
137 | "modelname_param = PipelineParameter(name=\"modelname\", default_value='iiot-quality-lgbm')\n",
138 | "selected_features_param = PipelineParameter(name=\"selectedfeatures\", default_value=\"S16,S20,S19,S18,S29,S41,S9,S10,S8,S11,S14,S13,S28,S15,S26,S33,S7,S3,S39\")\n",
139 | "\n",
140 | "buildDatasetStep = PythonScriptStep(name=\"build-datasets\",\n",
141 | " script_name=\"build-datasets.py\", \n",
142 | " runconfig = retrainRunConfig, \n",
143 | " compute_target=compute, \n",
144 | " source_directory=\".\",\n",
145 | " allow_reuse=False)\n",
146 | "\n",
147 | "buildModelStep = PythonScriptStep(name=\"build-model\",\n",
148 | " arguments=[\"--modelname\", modelname_param, \"--selectedfeatures\", selected_features_param],\n",
149 | " script_name=\"build-model.py\", \n",
150 | " runconfig = retrainRunConfig, \n",
151 | " compute_target=compute, \n",
152 | " source_directory=\".\",\n",
153 | " allow_reuse=False)\n",
154 | "\n",
155 | "buildModelStep.run_after(buildDatasetStep)"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": 4,
161 | "metadata": {
162 | "gather": {
163 | "logged": 1643075942681
164 | },
165 | "jupyter": {
166 | "outputs_hidden": false,
167 | "source_hidden": false
168 | },
169 | "nteract": {
170 | "transient": {
171 | "deleting": false
172 | }
173 | }
174 | },
175 | "outputs": [
176 | {
177 | "name": "stdout",
178 | "output_type": "stream",
179 | "text": [
180 | "Step build-model is ready to be created [30249118]\n",
181 | "Step build-datasets is ready to be created [5a7ac826]\n"
182 | ]
183 | },
184 | {
185 | "data": {
186 | "text/plain": [
187 | "[]"
188 | ]
189 | },
190 | "execution_count": 4,
191 | "metadata": {},
192 | "output_type": "execute_result"
193 | }
194 | ],
195 | "source": [
196 | "retrainingPipeline = Pipeline(workspace=ws, steps=[buildModelStep])\n",
197 | "\n",
198 | "retrainingPipeline.validate()"
199 | ]
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": null,
204 | "metadata": {
205 | "gather": {
206 | "logged": 1643075948675
207 | },
208 | "jupyter": {
209 | "outputs_hidden": false,
210 | "source_hidden": false
211 | },
212 | "nteract": {
213 | "transient": {
214 | "deleting": false
215 | }
216 | }
217 | },
218 | "outputs": [],
219 | "source": [
220 | "retrainPipelineRun = Experiment(ws, 'Model-Training-Pipeline').submit(retrainingPipeline)"
221 | ]
222 | },
223 | {
224 | "cell_type": "markdown",
225 | "metadata": {
226 | "nteract": {
227 | "transient": {
228 | "deleting": false
229 | }
230 | }
231 | },
232 | "source": [
233 | "# Publish Retraining Pipeline"
234 | ]
235 | },
236 | {
237 | "cell_type": "code",
238 | "execution_count": 7,
239 | "metadata": {
240 | "gather": {
241 | "logged": 1643076201913
242 | },
243 | "jupyter": {
244 | "outputs_hidden": false,
245 | "source_hidden": false
246 | },
247 | "nteract": {
248 | "transient": {
249 | "deleting": false
250 | }
251 | }
252 | },
253 | "outputs": [],
254 | "source": [
255 | "publishedPipeline = retrainPipelineRun.publish_pipeline(name=\"iiot-quality-training\", description=\"sample pipeline to retrain iiot quality prediction model\",version=\"0.1\")\n",
256 | "print(\"Newly published pipeline id => \",publishedPipeline.id)\n",
257 | "print(\"Newly published pipeline endpoint => \", publishedPipeline.endpoint)"
258 | ]
259 | }
260 | ],
261 | "metadata": {
262 | "kernel_info": {
263 | "name": "python38-azureml"
264 | },
265 | "kernelspec": {
266 | "display_name": "Python 3.8 - AzureML",
267 | "language": "python",
268 | "name": "python38-azureml"
269 | },
270 | "language_info": {
271 | "codemirror_mode": {
272 | "name": "ipython",
273 | "version": 3
274 | },
275 | "file_extension": ".py",
276 | "mimetype": "text/x-python",
277 | "name": "python",
278 | "nbconvert_exporter": "python",
279 | "pygments_lexer": "ipython3",
280 | "version": "3.8.5"
281 | },
282 | "microsoft": {
283 | "host": {
284 | "AzureML": {
285 | "notebookHasBeenCompleted": true
286 | }
287 | }
288 | },
289 | "nteract": {
290 | "version": "nteract-front-end@1.0.0"
291 | }
292 | },
293 | "nbformat": 4,
294 | "nbformat_minor": 0
295 | }
296 |
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/ml-pipelines/02-build-prediction-pipeline.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "source": [
6 | "# Get Azure ML Workspace"
7 | ],
8 | "metadata": {
9 | "nteract": {
10 | "transient": {
11 | "deleting": false
12 | }
13 | }
14 | }
15 | },
16 | {
17 | "cell_type": "code",
18 | "source": [
19 | "from azureml.core import Workspace, Environment, Experiment\r\n",
20 | "ws = Workspace.from_config()\r\n",
21 | "\r\n",
22 | "print('Workspace Name: ' + ws.name, \r\n",
23 | " 'Azure Region: ' + ws.location, \r\n",
24 | " 'Resource Group: ' + ws.resource_group, \r\n",
25 | " sep = '\\n')"
26 | ],
27 | "outputs": [],
28 | "execution_count": null,
29 | "metadata": {
30 | "jupyter": {
31 | "source_hidden": false,
32 | "outputs_hidden": false
33 | },
34 | "nteract": {
35 | "transient": {
36 | "deleting": false
37 | }
38 | },
39 | "gather": {
40 | "logged": 1643075499773
41 | }
42 | }
43 | },
44 | {
45 | "cell_type": "markdown",
46 | "source": [
47 | "# Create Conda Environment with Dependencies"
48 | ],
49 | "metadata": {
50 | "nteract": {
51 | "transient": {
52 | "deleting": false
53 | }
54 | }
55 | }
56 | },
57 | {
58 | "cell_type": "code",
59 | "source": [
60 | "from azureml.core import Environment\r\n",
61 | "from azureml.core.conda_dependencies import CondaDependencies\r\n",
62 | "\r\n",
63 | "predict_env = Environment(name=\"predict-env\")\r\n",
64 | "predict_conda_deps = CondaDependencies.create(pip_packages=['pandas','azureml-core','azureml-dataset-runtime[fuse]','numpy','scikit-learn','azure-kusto-data[pandas]','lightgbm'])\r\n",
65 | "predict_env.python.conda_dependencies = predict_conda_deps"
66 | ],
67 | "outputs": [],
68 | "execution_count": null,
69 | "metadata": {
70 | "jupyter": {
71 | "source_hidden": false,
72 | "outputs_hidden": false
73 | },
74 | "nteract": {
75 | "transient": {
76 | "deleting": false
77 | }
78 | },
79 | "gather": {
80 | "logged": 1643075502677
81 | }
82 | }
83 | },
84 | {
85 | "cell_type": "markdown",
86 | "source": [
87 | "# Create Prediction Pipeline"
88 | ],
89 | "metadata": {
90 | "nteract": {
91 | "transient": {
92 | "deleting": false
93 | }
94 | }
95 | }
96 | },
97 | {
98 | "cell_type": "code",
99 | "source": [
100 | "from azureml.core.compute import AmlCompute\r\n",
101 | "from azureml.pipeline.core import Pipeline, PipelineParameter\r\n",
102 | "from azureml.pipeline.steps import PythonScriptStep\r\n",
103 | "from azureml.core.runconfig import RunConfiguration\r\n",
104 | "\r\n",
105 | "compute = AmlCompute(ws, \"cpu-cluster\")\r\n",
106 | "\r\n",
107 | "predictRunConfig = RunConfiguration()\r\n",
108 | "predictRunConfig.target = compute\r\n",
109 | "predictRunConfig.environment = predict_env\r\n",
110 | "\r\n",
111 | "cluster = \"https://iiotmfgdev.westus2.kusto.windows.net\"\r\n",
112 | "db = \"mfgdb\"\r\n",
113 | "query=\"opcua_raw | where payload contains 'BatchNumber' and unixtime_milliseconds_todatetime(todouble(payload.SourceTimestamp)) between (now(-10m).. now()) | mv-apply payload on (extend key = tostring(bag_keys(payload)[0]) | extend value = payload[key] | summarize b = make_bag(pack(key, value)) ) | evaluate bag_unpack(b)\"\r\n",
114 | "\r\n",
115 | "modelname_param = PipelineParameter(name=\"modelname\", default_value='iiot-quality-lgbm')\r\n",
116 | "selected_features_param = PipelineParameter(name=\"selectedfeatures\", default_value=\"S16,S20,S19,S18,S29,S41,S9,S10,S8,S11,S14,S13,S28,S15,S26,S33,S7,S3,S39\")\r\n",
117 | "kustocluster_param = PipelineParameter(name=\"kustocluster\", default_value=cluster)\r\n",
118 | "kustodb_param = PipelineParameter(name=\"kustodb\", default_value=db)\r\n",
119 | "kustoquery_param = PipelineParameter(name=\"kustoquery\", default_value=query)\r\n",
120 | "resultdatastorename_param = PipelineParameter(name=\"resultdatastorename\", default_value='iiotmfgdatalakestore')\r\n",
121 | "resultfilename_param = PipelineParameter(name=\"resultfilename\", default_value='tmpresults.csv')\r\n",
122 | "\r\n",
123 | "predictStep = PythonScriptStep(name=\"predict\",\r\n",
124 | " arguments=[\"--modelname\", modelname_param, \r\n",
125 | " \"--selectedfeatures\", selected_features_param, \r\n",
126 | " \"--kustocluster\", kustocluster_param,\r\n",
127 | " \"--kustodb\", kustodb_param,\r\n",
128 | " \"--kustoquery\",kustoquery_param,\r\n",
129 | " \"--resultdatastorename\", resultdatastorename_param,\r\n",
130 | " \"--resultfilename\", resultfilename_param],\r\n",
131 | " script_name=\"predict.py\", \r\n",
132 | " runconfig = predictRunConfig, \r\n",
133 | " compute_target=compute, \r\n",
134 | " source_directory=\".\",\r\n",
135 | " allow_reuse=False)"
136 | ],
137 | "outputs": [],
138 | "execution_count": null,
139 | "metadata": {
140 | "jupyter": {
141 | "source_hidden": false,
142 | "outputs_hidden": false
143 | },
144 | "nteract": {
145 | "transient": {
146 | "deleting": false
147 | }
148 | },
149 | "gather": {
150 | "logged": 1643075939831
151 | }
152 | }
153 | },
154 | {
155 | "cell_type": "code",
156 | "source": [
157 | "predictPipeline = Pipeline(workspace=ws, steps=[predictStep])\r\n",
158 | "\r\n",
159 | "predictPipeline.validate()"
160 | ],
161 | "outputs": [],
162 | "execution_count": null,
163 | "metadata": {
164 | "jupyter": {
165 | "source_hidden": false,
166 | "outputs_hidden": false
167 | },
168 | "nteract": {
169 | "transient": {
170 | "deleting": false
171 | }
172 | },
173 | "gather": {
174 | "logged": 1643075942681
175 | }
176 | }
177 | },
178 | {
179 | "cell_type": "code",
180 | "source": [
181 | "predictPipelineRun = Experiment(ws, 'Model-Scoring-Pipeline').submit(predictPipeline)"
182 | ],
183 | "outputs": [],
184 | "execution_count": null,
185 | "metadata": {
186 | "jupyter": {
187 | "source_hidden": false,
188 | "outputs_hidden": false
189 | },
190 | "nteract": {
191 | "transient": {
192 | "deleting": false
193 | }
194 | },
195 | "gather": {
196 | "logged": 1643075948675
197 | }
198 | }
199 | },
200 | {
201 | "cell_type": "markdown",
202 | "source": [
203 | "# Publish Prediction Pipeline"
204 | ],
205 | "metadata": {
206 | "nteract": {
207 | "transient": {
208 | "deleting": false
209 | }
210 | }
211 | }
212 | },
213 | {
214 | "cell_type": "code",
215 | "source": [
216 | "publishedPipeline = predictPipelineRun.publish_pipeline(name=\"iiot-quality-prediction\", description=\"sample pipeline to perform batch prediction on iiot data\",version=\"0.1\")\r\n",
217 | "print(\"Newly published pipeline id => \",publishedPipeline.id)\r\n",
218 | "print(\"Newly published pipeline endpoint => \", publishedPipeline.endpoint)"
219 | ],
220 | "outputs": [],
221 | "execution_count": null,
222 | "metadata": {
223 | "jupyter": {
224 | "source_hidden": false,
225 | "outputs_hidden": false
226 | },
227 | "nteract": {
228 | "transient": {
229 | "deleting": false
230 | }
231 | },
232 | "gather": {
233 | "logged": 1643076201913
234 | }
235 | }
236 | }
237 | ],
238 | "metadata": {
239 | "kernelspec": {
240 | "name": "python38-azureml",
241 | "language": "python",
242 | "display_name": "Python 3.8 - AzureML"
243 | },
244 | "language_info": {
245 | "name": "python",
246 | "version": "3.8.5",
247 | "mimetype": "text/x-python",
248 | "codemirror_mode": {
249 | "name": "ipython",
250 | "version": 3
251 | },
252 | "pygments_lexer": "ipython3",
253 | "nbconvert_exporter": "python",
254 | "file_extension": ".py"
255 | },
256 | "microsoft": {
257 | "host": {
258 | "AzureML": {
259 | "notebookHasBeenCompleted": true
260 | }
261 | }
262 | },
263 | "nteract": {
264 | "version": "nteract-front-end@1.0.0"
265 | },
266 | "kernel_info": {
267 | "name": "python38-azureml"
268 | }
269 | },
270 | "nbformat": 4,
271 | "nbformat_minor": 0
272 | }
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/ml-pipelines/build-datasets.py:
--------------------------------------------------------------------------------
1 | from azureml.core import Run
2 | from azureml.core import Dataset, Datastore, Workspace
3 | from sklearn.model_selection import train_test_split
4 |
5 | import argparse
6 | import time
7 | import pandas as pd
8 | import os
9 |
10 | from datetime import timedelta
11 | from azure.kusto.data import KustoClient, KustoConnectionStringBuilder, ClientRequestProperties
12 | from azure.kusto.data.exceptions import KustoServiceError
13 | from azure.kusto.data.helpers import dataframe_from_result_table
14 |
15 | #parser = argparse.ArgumentParser()
16 | #parser.add_argument("--name", required=True)
17 | #args, _ = parser.parse_known_args()
18 |
19 | current_run = None
20 | current_ws = None
21 | dataStoreName = "iiotmfgdatalakestore"
22 | classColumnName = "Quality"
23 |
24 | trainFileName = "/iiot-quality-train.csv"
25 | testFileName = "/iiot-quality-test.csv"
26 | validationFileName = "/iiot-quality-validation.csv"
27 |
28 | trainDatasetName = "iiot-quality-train"
29 | testDatasetName = "iiot-quality-test"
30 |
31 | qualityDataFile = "qualitydata/batch-quality-data.csv"
32 | cluster = "https://iiotmfgdev.westus2.kusto.windows.net"
33 | db = "mfgdb"
34 | query = "opcua_raw | where payload contains 'BatchNumber' and unixtime_milliseconds_todatetime(todouble(payload.SourceTimestamp)) between (datetime(2022-05-04T20:32:00.000Z).. datetime(2022-05-05T00:50:00.000Z)) | mv-apply payload on (extend key = tostring(bag_keys(payload)[0]) | extend value = payload[key] | summarize b = make_bag(pack(key, value)) ) | evaluate bag_unpack(b)"
35 |
36 | def init():
37 | global current_run, current_ws
38 | print("init() is called.")
39 | current_run = Run.get_context()
40 | current_ws = current_run.experiment.workspace
41 | #current_ws = Workspace.from_config()
42 |
43 | def getDataFromKusto():
44 | kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(cluster)
45 | client = KustoClient(kcsb)
46 | response = client.execute(db, query)
47 | return dataframe_from_result_table(response.primary_results[0])
48 |
49 | def buildTrainTestDatasets():
50 | print("buildTrainTestDatasets() is called.")
51 |
52 | # Get telemetry data from Data Explorer for model training
53 | telemetrydf = getDataFromKusto()
54 | telemetrydf["SourceTimestamp"] = pd.to_datetime(telemetrydf["SourceTimestamp"],unit='ms')
55 | print("Rows => {0}".format(telemetrydf.shape[0]))
56 | print("Columns => {0}".format(telemetrydf.shape[1]))
57 | telemetrydf.head(5)
58 |
59 | # Get quality data from data lake via data store
60 | iiotmfgdatalakestore = Datastore.get(current_ws,dataStoreName)
61 | qualitydf = Dataset.Tabular.from_delimited_files(path = [(iiotmfgdatalakestore, qualityDataFile)]).to_pandas_dataframe()
62 | print("Rows => {0}".format(qualitydf.shape[0]))
63 | print("Columns => {0}".format(qualitydf.shape[1]))
64 | qualitydf.head()
65 |
66 | # Join Telemetry and Quality Data
67 | traindf = pd.merge(telemetrydf,qualitydf, on='BatchNumber')
68 | print("Rows => {0}".format(traindf.shape[0]))
69 | print("Columns => {0}".format(traindf.shape[1]))
70 | traindf.head()
71 |
72 | # Split into train/test/validation sets and upload them to the data lake
73 | train,other = train_test_split(traindf, test_size=0.30, shuffle=True,stratify=traindf[classColumnName],random_state=100)
74 | data_folder = os.path.join(os.getcwd(), 'data')
75 | os.makedirs(data_folder, exist_ok=True)
76 |
77 | train.to_csv("{0}{1}".format(data_folder,trainFileName),index=False)
78 | test,val = train_test_split(other, test_size=0.50, shuffle=True,stratify=other[classColumnName],random_state=100)
79 | test.to_csv("{0}{1}".format(data_folder,testFileName),index=False)
80 | val.to_csv("{0}{1}".format(data_folder,validationFileName),index=False)
81 |
82 | iiotmfgdatalakestore.upload_files(files=["{0}{1}".format(data_folder,trainFileName)], overwrite=True)
83 | iiotmfgdatalakestore.upload_files(files=["{0}{1}".format(data_folder,testFileName)], overwrite=True)
84 | iiotmfgdatalakestore.upload_files(files=["{0}{1}".format(data_folder,validationFileName)], overwrite=True)
85 |
86 | train_dataset = Dataset.Tabular.from_delimited_files(path=[(iiotmfgdatalakestore, trainFileName)])
87 | test_dataset = Dataset.Tabular.from_delimited_files(path=[(iiotmfgdatalakestore, testFileName)])
88 |
89 |
90 | # Register the training and test datasets
91 | train_dataset = train_dataset.register(workspace=current_ws, name=trainDatasetName,
92 | description="iiot quality training dataset",tags={"run_id": current_run.id},
93 | create_new_version=True)
94 |
95 | test_dataset = test_dataset.register(workspace=current_ws, name=testDatasetName,
96 | description="iiot quality test dataset",tags={"run_id": current_run.id},
97 | create_new_version=True)
98 |
99 | print("train / test dataset updated.")
100 |
101 | init()
102 | buildTrainTestDatasets()
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/ml-pipelines/build-model.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 | from azureml.core import Run, Workspace, Environment, Experiment
4 | from azureml.core import Dataset, Datastore, Workspace
5 | from azureml.core.model import Model
6 | from sklearn.pipeline import Pipeline
7 | from sklearn.impute import SimpleImputer
8 | from sklearn.preprocessing import StandardScaler, OneHotEncoder, MinMaxScaler
9 | from sklearn.compose import ColumnTransformer
10 | from sklearn.linear_model import LogisticRegression, RidgeClassifier, SGDClassifier
11 | from sklearn.tree import DecisionTreeClassifier
12 | from sklearn.ensemble import AdaBoostClassifier, GradientBoostingClassifier, RandomForestClassifier
13 | from lightgbm import LGBMClassifier
14 | from sklearn.metrics import classification_report
15 |
16 | import argparse
17 | import time
18 | import pandas as pd
19 | import os
20 | import pickle
21 |
22 | parser = argparse.ArgumentParser()
23 | parser.add_argument("--modelname", type=str, required=True)
24 | parser.add_argument("--selectedfeatures", type=str, required=True)
25 | args, _ = parser.parse_known_args()
26 |
27 | current_run = None
28 | current_ws = None
29 |
30 | trainDatasetName = "iiot-quality-train"
31 | testDatasetName = "iiot-quality-test"
32 | targetColumnName = "Quality"
33 |
34 | modelName = args.modelname #"iiot-quality-lgbm"
35 | modelFileName = modelName + ".pkl"
36 | features = args.selectedfeatures.split(",") #['S16','S20','S19','S18','S29','S41','S9','S10','S8','S11','S14','S13','S28','S15','S26','S33','S7','S3','S39']
37 |
38 | def init():
39 | global current_run, current_ws
40 | print("init() is called.")
41 | current_run = Run.get_context()
42 | current_ws = current_run.experiment.workspace
43 | #current_ws = Workspace.from_config()
44 |
45 | def buildmodel():
46 | print("buildmodel() is called.")
47 |
48 | # Get Datasets
49 | trainds = Dataset.get_by_name(current_ws,trainDatasetName)
50 | testds = Dataset.get_by_name(current_ws,testDatasetName)
51 | traindf = trainds.to_pandas_dataframe()
52 | testdf = testds.to_pandas_dataframe()
53 |
54 | print("Training rows => ",traindf.shape[0])
55 | print("Test rows => ",testdf.shape[0])
56 |
57 | # Train / Test datasets
58 | X_train = traindf[features]
59 | y_train = traindf[targetColumnName]
60 |
61 | X_test = testdf[features]
62 | y_test = testdf[targetColumnName]
63 |
64 | # Replace null values with "median" and normalize all numeric values using MinMaxScaler
65 | numeric_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), ('scaler', MinMaxScaler())])
66 | transformations = ColumnTransformer(transformers=[('num', numeric_transformer, X_train.columns)])
67 |
68 | # Build classifier pipeline with preprocessing steps as above, and LightGBM model
69 | # Only the training dataset is used to fit the model
70 | classifierPipeline = Pipeline(steps=[('preprocessor', transformations),('classifier', LGBMClassifier())])
71 | model = classifierPipeline.fit(X_train, y_train)
72 |
73 | # Run the model and extract predictions
74 | y_pred = classifierPipeline.predict(X_test)
75 |
76 | # Score the model against true values
77 | trainingScore = classifierPipeline.score(X_train, y_train)
78 | testScore = classifierPipeline.score(X_test, y_test)
79 | print('Training set score: {:.4f}'.format(trainingScore))
80 | print('Test set score: {:.4f}'.format(testScore))
81 | print(classification_report(y_test, y_pred))
82 |
83 | current_run.log("training_accuracy",trainingScore)
84 | current_run.log("test_accuracy",testScore)
85 |
86 | # Save and Register Model
87 | pickle.dump(classifierPipeline, open(modelFileName, 'wb'))
88 | modeltags = {
89 | "experiment": current_run.experiment.name,
90 | "run_id": current_run.id,
91 | "train_dataset_name" : trainds.name,
92 | "train_dataset_version" : trainds.version,
93 | "test_dataset_name" : testds.name,
94 | "test_dataset_version" : testds.version
95 | }
96 | model = Model.register(model_path=modelFileName, model_name=modelName,
97 | tags=modeltags, description="LightGBM model for iiot quality prediction", workspace=current_ws)
98 |
99 | print("model built and registered")
100 |
101 | init()
102 | buildmodel()
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/ml-pipelines/predict.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 | from azureml.core import Run, Workspace, Environment, Experiment
4 | from azureml.core import Dataset, Datastore, Workspace
5 | from azureml.core.model import Model
6 | from sklearn.pipeline import Pipeline
7 | from sklearn.impute import SimpleImputer
8 | from sklearn.preprocessing import StandardScaler, OneHotEncoder, MinMaxScaler
9 | from sklearn.compose import ColumnTransformer
10 | from sklearn.linear_model import LogisticRegression, RidgeClassifier, SGDClassifier
11 | from sklearn.tree import DecisionTreeClassifier
12 | from sklearn.ensemble import AdaBoostClassifier, GradientBoostingClassifier, RandomForestClassifier
13 | from lightgbm import LGBMClassifier
14 | from sklearn.metrics import classification_report
15 |
16 | from datetime import timedelta
17 | from azure.kusto.data import KustoClient, KustoConnectionStringBuilder, ClientRequestProperties
18 | from azure.kusto.data.exceptions import KustoServiceError
19 | from azure.kusto.data.helpers import dataframe_from_result_table
20 | from azureml.data.datapath import DataPath
21 |
22 | import argparse
23 | import time
24 | import os
25 | import pickle
26 | import joblib
27 | import json
28 |
29 | parser = argparse.ArgumentParser()
30 | parser.add_argument("--modelname", type=str, required=True)
31 | parser.add_argument("--selectedfeatures", type=str, required=True)
32 | parser.add_argument("--kustocluster", type=str, required=True)
33 | parser.add_argument("--kustodb", type=str, required=True)
34 | parser.add_argument("--kustoquery", type=str, required=True)
35 | parser.add_argument("--resultdatastorename", type=str, required=True)
36 | parser.add_argument("--resultfilename", type=str, required=True)
37 | args, _ = parser.parse_known_args()
38 |
39 | current_run = None
40 | current_ws = None
41 |
42 | modelName = args.modelname
43 | features = args.selectedfeatures.split(",") #['S16','S20','S19','S18','S29','S41','S9','S10','S8','S11','S14','S13','S28','S15','S26','S33','S7','S3','S39']
44 | resultfilename = args.resultfilename
45 | resultdatastorename = args.resultdatastorename
46 | query = args.kustoquery
47 | cluster = args.kustocluster
48 | db = args.kustodb
49 |
50 | def init():
51 | global current_run, current_ws
52 | print("init() is called.")
53 | current_run = Run.get_context()
54 | current_ws = current_run.experiment.workspace
55 | #current_ws = Workspace.from_config()
56 |
57 |
58 | def getDataFromKusto(q):
59 | kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(cluster)
60 | client = KustoClient(kcsb)
61 | response = client.execute(db, q)
62 | return dataframe_from_result_table(response.primary_results[0])
63 |
64 | def modelprediction():
65 | print("modelprediction() is called.")
66 |
67 | # Get Latest Model
68 | currentmodel = joblib.load(Model.get_model_path(modelName,_workspace=current_ws))
69 |
70 | # Get Latest Data for Prediction
71 | testdf = getDataFromKusto(query)
72 | print(testdf.head(5))
73 |
74 | # Predict Results
75 | predictionResults = currentmodel.predict(testdf[features])
76 | print(predictionResults)
77 |
78 | # Save Prediction Results
79 | resultdf = testdf[features].copy()  # copy so the columns added below do not modify a view of testdf
80 | resultdf["Prediction"] = predictionResults
81 | resultdf["BatchNumber"] = testdf["BatchNumber"]
82 | resultdf["SourceTimestamp"] = pd.to_datetime(testdf["SourceTimestamp"].astype(int), unit="ms")
83 |
84 | resultsDirectoryName = "predictionresults" # Make sure this matches with the directory name in the Data Lake
85 | data_folder = os.path.join(os.getcwd(), resultsDirectoryName)
86 | os.makedirs(data_folder, exist_ok=True)
87 |
88 | resultdf.to_csv("{0}/{1}".format(data_folder,resultfilename),index=False)
89 |
90 | resultsdatastore = Datastore.get(current_ws,resultdatastorename)
91 | Dataset.File.upload_directory(src_dir=resultsDirectoryName, target=DataPath(resultsdatastore, "/{0}/".format(resultsDirectoryName)), pattern="*.csv", overwrite=True)
92 |
93 | print("model prediction results saved")
94 |
95 | init()
96 | modelprediction()
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/powerbi/PredictionResults.pbix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/6_MachineLearningForIIoT/powerbi/PredictionResults.pbix
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/synapse-pipelines/QualityPrediction_Pipeline_Template.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/6_MachineLearningForIIoT/synapse-pipelines/QualityPrediction_Pipeline_Template.zip
--------------------------------------------------------------------------------
/6_MachineLearningForIIoT/synapse-pipelines/Read Quality Prediction CSV.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "source": [
6 | "# https://docs.microsoft.com/en-us/azure/synapse-analytics/spark/microsoft-spark-utilities?pivots=programming-language-python\r\n",
7 | "\r\n",
8 | "# Data access info\r\n",
9 | "storage_account_name = \"iiotmfgdatalake\"\r\n",
10 | "container_name = f\"raw\"\r\n",
11 | "relative_path = f\"predictionresults/results_4c0ea043-3d03-4373-a0cf-253eb3e66ca1.csv\"\r\n",
12 | "\r\n",
13 | "adls_data_path = f\"abfss://{container_name}@{storage_account_name}.dfs.core.windows.net/{relative_path}\"\r\n",
14 | "print(f\"Reading data from: {adls_data_path}\")\r\n",
15 | "\r\n",
16 | "dataDF = spark.read.csv(adls_data_path)\r\n",
17 | "display(dataDF)"
18 | ],
19 | "outputs": [],
20 | "execution_count": 2,
21 | "metadata": {
22 | "collapsed": false
23 | }
24 | }
25 | ],
26 | "metadata": {
27 | "kernelspec": {
28 | "name": "synapse_pyspark",
29 | "display_name": "Synapse PySpark"
30 | },
31 | "language_info": {
32 | "name": "python"
33 | },
34 | "save_output": true,
35 | "synapse_widget": {
36 | "version": "0.1",
37 | "state": {}
38 | }
39 | },
40 | "nbformat": 4,
41 | "nbformat_minor": 2
42 | }
--------------------------------------------------------------------------------
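The notebook above hard-codes a single result file and (before the fix) read it without header handling; since `predict.py` writes the CSV with a header row, `header=True` is needed for the column names to survive the round trip. A hedged sketch for a Synapse PySpark cell (where `spark` and `display` are notebook globals), reading the whole results folder with an explicit schema; the feature-column names are placeholders you would swap for your actual `features` list:

```python
from pyspark.sql.types import (StructType, StructField, StringType,
                               DoubleType, TimestampType)

# Placeholder schema: the real feature columns depend on the `features`
# list used in predict.py; Prediction, BatchNumber and SourceTimestamp
# match the columns the script appends.
schema = StructType([
    StructField("Feature1", DoubleType()),        # hypothetical feature
    StructField("Feature2", DoubleType()),        # hypothetical feature
    StructField("Prediction", DoubleType()),
    StructField("BatchNumber", StringType()),
    StructField("SourceTimestamp", TimestampType()),
])

# Reading the folder picks up every results_*.csv file instead of one
# hard-coded run id.
adls_data_path = "abfss://raw@iiotmfgdatalake.dfs.core.windows.net/predictionresults/"

# header=True skips the header row; an explicit schema avoids the extra
# pass over the data that inferSchema would need.
dataDF = spark.read.csv(adls_data_path, header=True, schema=schema)
display(dataDF)
```
--------------------------------------------------------------------------------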
/7_ImageRecognitionForIIoT/README.md:
--------------------------------------------------------------------------------
1 | :warning: In progress
2 |
3 | # Operationalizing deep learning based image recognition models on the factory floor
4 |
5 | The goal of this sample is to accelerate deployment of [Industrial IoT Prediction Patterns](TODO). There is no one-size-fits-all solution; there are many [considerations](TODO) to review before moving your workload to production.
6 |
7 | ## Coming soon...
--------------------------------------------------------------------------------
/8_DeepReinforcementLearningForIIoT/README.md:
--------------------------------------------------------------------------------
1 | :warning: In progress
2 |
3 | # Control system optimization using Deep Reinforcement Learning (DRL)
4 |
5 | The goal of this sample is to accelerate deployment of [Industrial IoT Adaptability Patterns](TODO). There is no one-size-fits-all solution; there are many [considerations](TODO) to review before moving your workload to production.
6 |
7 | ## Coming soon...
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [project-title] Changelog
2 |
3 |
4 | # x.y.z (yyyy-mm-dd)
5 |
6 | *Features*
7 | * ...
8 |
9 | *Bug Fixes*
10 | * ...
11 |
12 | *Breaking Changes*
13 | * ...
14 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to Industrial IoT Patterns
2 |
3 | This project welcomes contributions and suggestions. Most contributions require you to agree to a
4 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
5 | the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
6 |
7 | When you submit a pull request, a CLA bot will automatically determine whether you need to provide
8 | a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
9 | provided by the bot. You will only need to do this once across all repos using our CLA.
10 |
11 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
12 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
13 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
14 |
15 | - [Code of Conduct](#coc)
16 | - [Issues and Bugs](#issue)
17 | - [Feature Requests](#feature)
18 | - [Submission Guidelines](#submit)
19 |
20 | ## Code of Conduct
21 | Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
22 |
23 | ## Found an Issue?
24 | If you find a bug in the source code or a mistake in the documentation, you can help us by
25 | [submitting an issue](#submit-issue) to the GitHub Repository. Even better, you can
26 | [submit a Pull Request](#submit-pr) with a fix.
27 |
28 | ## Want a Feature?
29 | You can *request* a new feature by [submitting an issue](#submit-issue) to the GitHub
30 | Repository. If you would like to *implement* a new feature, please submit an issue with
31 | a proposal for your work first, to be sure that we can use it.
32 |
33 | * **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
34 |
35 | ## Submission Guidelines
36 |
37 | ### Submitting an Issue
38 | Before you submit an issue, search the archive; your question may already have been answered.
39 |
40 | If your issue appears to be a bug and hasn't been reported, open a new issue.
41 | Help us maximize the effort we can spend fixing issues and adding new
42 | features by not reporting duplicate issues. Providing the following information will increase the
43 | chances of your issue being dealt with quickly:
44 |
45 | * **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps
46 | * **Version** - what version is affected (e.g. 0.1.2)
47 | * **Motivation for or Use Case** - explain what you are trying to do and why the current behavior is a bug for you
48 | * **Browsers and Operating System** - is this a problem with all browsers?
49 | * **Reproduce the Error** - provide a live example or an unambiguous set of steps
50 | * **Related Issues** - has a similar issue been reported before?
51 | * **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be
52 | causing the problem (line of code or commit)
53 |
54 | You can file new issues by providing the above information at the corresponding repository's issues link: https://github.com/[organization-name]/[repository-name]/issues/new.
55 |
56 | ### Submitting a Pull Request (PR)
57 | Before you submit your Pull Request (PR) consider the following guidelines:
58 |
59 | * Search the repository (https://github.com/[organization-name]/[repository-name]/pulls) for an open or closed PR
60 | that relates to your submission. You don't want to duplicate effort.
61 |
62 | * Make your changes in a new git fork:
63 |
64 | * Commit your changes using a descriptive commit message
65 | * Push your fork to GitHub
66 | * In GitHub, create a pull request
67 | * If we suggest changes then:
68 | * Make the required updates.
69 | * Rebase your fork and force push to your GitHub repository (this will update your Pull Request):
70 |
71 | ```shell
72 | git rebase -i master
73 | git push -f
74 | ```
75 |
76 | That's it! Thank you for your contribution!
77 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Azure Samples
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Industrial IoT Patterns
2 |
3 | A modern IIoT solution goes beyond moving existing industrial processes and tools to the cloud. It involves transforming your operations, embracing cloud-native services, and leveraging machine learning and the intelligent edge to optimize industrial processes.
4 |
5 | There are five key stages for a typical IIoT solution:
6 |
7 | 
8 |
9 | Each stage of an IIoT solution consists of multiple design patterns. Please review the [Industrial IoT Design Patterns doc](TODO) for considerations and guidance on when to use each pattern. Below are samples that implement some of these design patterns.
10 |
11 | ## Getting Started
12 |
13 | 1. [Connectivity with Industrial Assets using OPC UA and Edge for Linux on Windows (EFLOW)](./1_Connectivity/README.md)
14 | 1. [Operational Visibility with Anomaly Detection and Root Cause Analysis](./2_OperationalVisibility/README.md)
15 | 1. [Overall Equipment Effectiveness (OEE) and KPI Calculation Engine](./3_OEECalculationEngine/README.md)
16 | 1. Factory and Supply Chain Digital Twin (*In Progress*)
17 | 1. [Exploratory Data Analysis for failure predictions](./5_ExplorationDataAnalysis/README.md)
18 | 1. [Operationalizing machine learning based prediction models](./6_MachineLearningForIIoT/README.md)
19 | 1. Operationalizing image recognition models on the factory floor (*In Progress*)
20 | 1. Control system optimization using Deep Reinforcement Learning (DRL) (*In Progress*)
--------------------------------------------------------------------------------
/images/adx-dashboard-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-dashboard-1.png
--------------------------------------------------------------------------------
/images/adx-dashboard-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-dashboard-2.png
--------------------------------------------------------------------------------
/images/adx-dashboard-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-dashboard-3.png
--------------------------------------------------------------------------------
/images/adx-dashboard-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-dashboard-4.png
--------------------------------------------------------------------------------
/images/adx-dashboard-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-dashboard-5.png
--------------------------------------------------------------------------------
/images/adx-dashboard-6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-dashboard-6.png
--------------------------------------------------------------------------------
/images/adx-iothub-connection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-iothub-connection.png
--------------------------------------------------------------------------------
/images/adx-query1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-query1.png
--------------------------------------------------------------------------------
/images/adx-query2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-query2.png
--------------------------------------------------------------------------------
/images/adx-webui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/adx-webui.png
--------------------------------------------------------------------------------
/images/alert-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/alert-1.png
--------------------------------------------------------------------------------
/images/alert-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/alert-2.png
--------------------------------------------------------------------------------
/images/alert-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/alert-3.png
--------------------------------------------------------------------------------
/images/alert-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/alert-4.png
--------------------------------------------------------------------------------
/images/alert-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/alert-5.png
--------------------------------------------------------------------------------
/images/alert-6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/alert-6.png
--------------------------------------------------------------------------------
/images/connectivity-sample.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/connectivity-sample.png
--------------------------------------------------------------------------------
/images/eflow-connect.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/eflow-connect.png
--------------------------------------------------------------------------------
/images/eflow-opcpublisher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/eflow-opcpublisher.png
--------------------------------------------------------------------------------
/images/iiot-maturity.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/iiot-maturity.png
--------------------------------------------------------------------------------
/images/iothub-access.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/iothub-access.png
--------------------------------------------------------------------------------
/images/iothub-route-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/iothub-route-1.png
--------------------------------------------------------------------------------
/images/iothub-route-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/iothub-route-2.png
--------------------------------------------------------------------------------
/images/iothub-route-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/iothub-route-3.png
--------------------------------------------------------------------------------
/images/kepware-certs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/kepware-certs.png
--------------------------------------------------------------------------------
/images/kepware-endpoints.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/kepware-endpoints.png
--------------------------------------------------------------------------------
/images/kepware-tags.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/kepware-tags.png
--------------------------------------------------------------------------------
/images/ma-add-feed-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-add-feed-1.png
--------------------------------------------------------------------------------
/images/ma-add-feed-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-add-feed-2.png
--------------------------------------------------------------------------------
/images/ma-add-feed-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-add-feed-3.png
--------------------------------------------------------------------------------
/images/ma-add-feed-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-add-feed-4.png
--------------------------------------------------------------------------------
/images/ma-add-feed-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-add-feed-5.png
--------------------------------------------------------------------------------
/images/ma-anomaly-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-anomaly-1.png
--------------------------------------------------------------------------------
/images/ma-anomaly-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-anomaly-2.png
--------------------------------------------------------------------------------
/images/ma-anomaly-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-anomaly-3.png
--------------------------------------------------------------------------------
/images/ma-anomaly-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-anomaly-4.png
--------------------------------------------------------------------------------
/images/ma-anomaly-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-anomaly-5.png
--------------------------------------------------------------------------------
/images/ma-db-permissions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ma-db-permissions.png
--------------------------------------------------------------------------------
/images/ml-baseline-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-baseline-1.png
--------------------------------------------------------------------------------
/images/ml-datastore.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-datastore.png
--------------------------------------------------------------------------------
/images/ml-eda-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-eda-1.png
--------------------------------------------------------------------------------
/images/ml-eda-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-eda-2.png
--------------------------------------------------------------------------------
/images/ml-eda-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-eda-3.png
--------------------------------------------------------------------------------
/images/ml-eda-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-eda-4.png
--------------------------------------------------------------------------------
/images/ml-eda-freq.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-eda-freq.png
--------------------------------------------------------------------------------
/images/ml-eda.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-eda.png
--------------------------------------------------------------------------------
/images/ml-model-predict-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-model-predict-1.png
--------------------------------------------------------------------------------
/images/ml-model-predict-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-model-predict-2.png
--------------------------------------------------------------------------------
/images/ml-model-predict-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-model-predict-3.png
--------------------------------------------------------------------------------
/images/ml-model-train-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-model-train-1.png
--------------------------------------------------------------------------------
/images/ml-model-train-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-model-train-2.png
--------------------------------------------------------------------------------
/images/ml-model-train-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-model-train-3.png
--------------------------------------------------------------------------------
/images/ml-notebookupload.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-notebookupload.png
--------------------------------------------------------------------------------
/images/ml-ops-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-ops-1.png
--------------------------------------------------------------------------------
/images/ml-ops-batch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-ops-batch.png
--------------------------------------------------------------------------------
/images/ml-qualitydata.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-qualitydata.png
--------------------------------------------------------------------------------
/images/ml-raw-dataset.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-raw-dataset.png
--------------------------------------------------------------------------------
/images/ml-simulatedsensors-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-simulatedsensors-1.png
--------------------------------------------------------------------------------
/images/ml-simulatedsensors-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-simulatedsensors-2.png
--------------------------------------------------------------------------------
/images/ml-simulatedsensors-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-simulatedsensors-3.png
--------------------------------------------------------------------------------
/images/ml-simulatedsensors-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ml-simulatedsensors-4.png
--------------------------------------------------------------------------------
/images/oee-pbi-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/oee-pbi-1.png
--------------------------------------------------------------------------------
/images/oee-pbi-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/oee-pbi-2.png
--------------------------------------------------------------------------------
/images/oee.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/oee.png
--------------------------------------------------------------------------------
/images/operational-visibility-sample.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/operational-visibility-sample.png
--------------------------------------------------------------------------------
/images/powerbi-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/powerbi-1.png
--------------------------------------------------------------------------------
/images/powerbi-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/powerbi-2.png
--------------------------------------------------------------------------------
/images/powerbi-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/powerbi-3.png
--------------------------------------------------------------------------------
/images/sparkpool-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/sparkpool-1.png
--------------------------------------------------------------------------------
/images/sparkpool-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/sparkpool-2.png
--------------------------------------------------------------------------------
/images/sparkpool-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/sparkpool-3.png
--------------------------------------------------------------------------------
/images/synapse-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/synapse-1.png
--------------------------------------------------------------------------------
/images/synapse-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/synapse-2.png
--------------------------------------------------------------------------------
/images/synapse-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/synapse-3.png
--------------------------------------------------------------------------------
/images/synapse-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/synapse-4.png
--------------------------------------------------------------------------------
/images/synapse-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/synapse-5.png
--------------------------------------------------------------------------------
/images/synapse-6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/synapse-6.png
--------------------------------------------------------------------------------
/images/ts-query1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ts-query1.png
--------------------------------------------------------------------------------
/images/ts-query2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/industrial-iot-patterns/d9faca82957b9ef2466867a541be78ece78803b7/images/ts-query2.png
--------------------------------------------------------------------------------
/other/CertificateCeremony.md:
--------------------------------------------------------------------------------
1 | :warning: In progress
2 |
3 | # Generating Certs for Edge/Device
4 |
5 | ### Root Cert VM
6 |
7 | - Create a new Ubuntu LTS VM named "certrootvm"
8 |
9 | ### Root CA
10 |
11 | - Set up using the existing openssl config
12 |
13 | - wget https://raw.githubusercontent.com/Azure/iotedge/main/tools/CACertificates/openssl_root_ca.cnf -O openssl.cnf
14 |
15 | - mkdir -p certs/csr
16 | - mkdir -p certs/private
17 | - mkdir -p certs/certs
18 | - mkdir -p certs/newcerts
19 | - touch certs/index.txt
20 | - echo 1000 > certs/serial
21 | - export CERTIFICATE_OUTPUT_DIR=certs
22 |
23 | - Generate the Root CA Private Key
24 |
25 | - openssl genrsa -out rootCAPrivateKey.pem 4096
26 |
27 | - Generate the Root CA Public Key
28 |
29 | - openssl req -new -x509 -config openssl.cnf -nodes -days 3650 -key rootCAPrivateKey.pem -out rootCAPublicKey.pem -subj "/CN=Root CA for IoT Edge" -extensions "v3_ca"
30 |
31 | ### Issuing CA
32 |
33 | - Generate the Issuing CA Private Key and CSR
34 |
35 | - openssl req -newkey rsa:4096 -nodes -keyout issuingCAPrivateKey.pem -out issuingCertificateRequest.csr -subj "/CN=Issuing CA for IoT Edge"
36 |
37 | - Generate the Issuing CA Public Key by signing the CSR from the Root CA
38 |
39 | - openssl ca -batch -config openssl.cnf -in issuingCertificateRequest.csr -days 180 -cert rootCAPublicKey.pem -keyfile rootCAPrivateKey.pem -keyform PEM -out issuingCAPublicKey.pem -extensions v3_intermediate_ca
40 |
41 | ### Generate Edge and Device CA
42 |
43 | - Generate the Certificate Request for the Edge CA and the Private Key. Note: to act as an Edge CA, the certificate needs the CA basic constraint and the digitalSignature key usage extension
44 |
45 | - openssl req -newkey rsa:4096 -nodes -keyout IoTEdgeCAPrivateKey.pem -out certificateRequest.csr -subj "/CN=IoT Edge Root CA certedgevm" --addext basicConstraints=critical,CA:TRUE,pathlen:2 --addext keyUsage=keyCertSign,digitalSignature --extensions v3_ca
46 |
47 | - Sign the Certificate Request, insert a random Serial Number, and create the Public Key
48 |
49 | - openssl ca -batch -config openssl.cnf -in certificateRequest.csr -days 180 -cert issuingCAPublicKey.pem -keyfile issuingCAPrivateKey.pem -keyform PEM -out IoTEdgeCAPublicKey.pem -extensions v3_intermediate_ca
50 |
51 | - Create Chain
52 | - rm *.csr
53 | - cat IoTEdgeCAPublicKey.pem issuingCAPublicKey.pem rootCAPublicKey.pem > IoTEdgeCAPublicKeyChain.pem
54 |
55 | ### Get thumbprint
56 |
57 | - openssl x509 -in IoTEdgeCAPublicKey.pem -text -fingerprint
58 |
59 | ### Create IoT Edge Device
60 |
61 | - Create another Ubuntu 20.04 VM named "certedgevm"
62 |
63 | - Create new IoT Edge device in IoT Hub named "certedgevm"
64 |
65 | - Copy cert files from "certrootvm" to "certedgevm"
66 |
67 | - scp ./IoTEdgeCAPublicKeyChain.pem jomit@10.0.0.7:/home/jomit
68 | - scp ./IoTEdgeCAPrivateKey.pem jomit@10.0.0.7:/home/jomit
69 |
70 | ### Install IoT Edge on "certedgevm"
71 |
72 | - https://docs.microsoft.com/en-us/azure/iot-edge/how-to-provision-single-device-linux-x509?view=iotedge-2020-11&tabs=azure-portal%2Cubuntu#install-iot-edge
73 |
74 | - wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
75 | - sudo dpkg -i packages-microsoft-prod.deb
76 | - rm packages-microsoft-prod.deb
77 |
78 | - sudo apt-get update;
79 | - sudo apt-get install moby-engine
80 |
81 | - Update /etc/docker/daemon.json to configure container log limits
82 |
83 | - sudo apt-get update;
84 | - sudo apt-get install aziot-edge
85 |
86 |
87 | ### Configure Certificates
88 |
89 | - Edge CA
90 | - sudo cp /etc/aziot/config.toml.edge.template /etc/aziot/config.toml
91 |
92 | - sudo mkdir /etc/aziot/certificates
93 | - sudo cp IoTEdgeCAPublicKeyChain.pem /etc/aziot/certificates
94 | - sudo cp IoTEdgeCAPrivateKey.pem /etc/aziot/certificates
95 | - sudo chown aziotcs:aziotcs /etc/aziot/certificates/IoTEdgeCAPublicKeyChain.pem
96 | - sudo chown aziotks:aziotks /etc/aziot/certificates/IoTEdgeCAPrivateKey.pem
97 | - sudo chmod 660 /etc/aziot/certificates/IoTEdgeCAP*.pem
98 | - sudo nano /etc/aziot/config.toml
99 |
100 | - "Update the file as below"
101 |
102 | echo 'hostname = "EdgeHubHA"' > config.toml
103 | echo '[provisioning]' >> config.toml
104 | echo ' source = "manual"' >> config.toml
105 | echo ' connection_string = "HostName=youriothub.azure-devices.net;DeviceId=hostname;SharedAccessKey=+fdwfYOURKEYHEREwM="' >> config.toml
106 | echo '[edge_ca]' >> config.toml
107 | echo ' cert = "file:///etc/aziot/certificates/IoTEdgeCAPublicKeyChain.pem"' >> config.toml
108 | echo ' pk = "file:///etc/aziot/certificates/IoTEdgeCAPrivateKey.pem"' >> config.toml
109 |
110 |
111 | - sudo rm /var/lib/aziot/edged/cache/provisioning_state
112 | - sudo iotedge config apply
113 |
114 | - sudo iotedge system status
115 | - sudo iotedge system logs
116 | - sudo iotedge check
117 |
118 | - openssl s_client -showcerts -servername 127.0.0.1 -connect 127.0.0.1:443 < /dev/null | grep CN
119 |
--------------------------------------------------------------------------------
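The thumbprint IoT Hub expects when registering an X.509 device is the SHA-1 fingerprint of the certificate, which the `openssl x509 -fingerprint` step above prints. For scripting, a hedged sketch using the third-party `cryptography` package (an assumption; it is not used elsewhere in this repo) yields the same value in the colon-free uppercase hex form the portal accepts:

```python
from cryptography import x509
from cryptography.hazmat.primitives import hashes

# Load the Edge CA public certificate generated in the steps above.
with open("IoTEdgeCAPublicKey.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read())

# fingerprint() returns the raw digest bytes; IoT Hub displays
# thumbprints as uppercase hex without colon separators.
thumbprint = cert.fingerprint(hashes.SHA1()).hex().upper()
print(thumbprint)
```
--------------------------------------------------------------------------------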
/other/openssl_root_ca.cnf:
--------------------------------------------------------------------------------
1 | # OpenSSL root CA configuration file.
2 |
3 | [ ca ]
4 | default_ca = CA_default
5 |
6 | [ CA_default ]
7 | # Directory and file locations.
8 | dir = $ENV::CERTIFICATE_OUTPUT_DIR
9 | certs = $dir/certs
10 | crl_dir = $dir/crl
11 | new_certs_dir = $dir/newcerts
12 | database = $dir/index.txt
13 | serial = $dir/serial
14 | RANDFILE = $dir/.rnd
15 | # We enable this solely to facilitate non production and/or test automation environments.
16 | # This flag essentially allows the creation of certificates with the same subject name
17 | # (or distinguishing name DN). What this essentially boils down to is if a certificate
18 | # was created with a specific subject name it can be re-created and thus overwrites
19 | # the previous certificate and key. Without this openssl fails to re-create the certificate.
20 | # With this change, cert gen becomes more amenable to CRUD operations during test runs.
21 | unique_subject = no
22 |
23 | # The root key and root certificate.
24 | private_key = $dir/private/azure-iot-test-only.root.ca.key.pem
25 | certificate = $dir/certs/azure-iot-test-only.root.ca.cert.pem
26 |
27 | # For certificate revocation lists.
28 | crlnumber = $dir/crlnumber
29 | crl = $dir/crl/azure-iot-test-only.intermediate.crl.pem
30 | crl_extensions = crl_ext
31 | default_crl_days = 30
32 |
33 | # SHA-1 is deprecated, so use SHA-2 instead.
34 | default_md = sha256
35 |
36 | name_opt = ca_default
37 | cert_opt = ca_default
38 | default_days = 375
39 | preserve = no
40 | policy = policy_loose
41 |
42 | [ policy_strict ]
43 | # The root CA should only sign intermediate certificates that match.
44 | countryName = optional
45 | stateOrProvinceName = optional
46 | organizationName = optional
47 | organizationalUnitName = optional
48 | commonName = supplied
49 | emailAddress = optional
50 |
51 | [ policy_loose ]
52 | # Allow the intermediate CA to sign a more diverse range of certificates.
53 | countryName = optional
54 | stateOrProvinceName = optional
55 | localityName = optional
56 | organizationName = optional
57 | organizationalUnitName = optional
58 | commonName = supplied
59 | emailAddress = optional
60 |
61 | [ req ]
62 | default_bits = 2048
63 | distinguished_name = req_distinguished_name
64 | string_mask = utf8only
65 |
66 | # SHA-1 is deprecated, so use SHA-2 instead.
67 | default_md = sha256
68 |
69 | # Extension to add when the -x509 option is used.
70 | x509_extensions = v3_ca
71 |
72 | [ req_distinguished_name ]
73 | # See <https://en.wikipedia.org/wiki/Certificate_signing_request>.
74 | countryName = Country Name (2 letter code)
75 | stateOrProvinceName = State or Province Name
76 | localityName = Locality Name
77 | 0.organizationName = Organization Name
78 | organizationalUnitName = Organizational Unit Name
79 | commonName = Common Name
80 | emailAddress = Email Address
81 |
82 | # Optionally, specify some defaults.
83 | countryName_default = US
84 | stateOrProvinceName_default = WA
85 | localityName_default =
86 | 0.organizationName_default = My Organization
87 | organizationalUnitName_default =
88 | emailAddress_default =
89 |
90 | [ v3_ca ]
91 | # Extensions for a typical CA.
92 | subjectKeyIdentifier = hash
93 | authorityKeyIdentifier = keyid:always,issuer:always
94 | basicConstraints = critical, CA:true
95 | keyUsage = critical, digitalSignature, cRLSign, keyCertSign
96 |
97 | [ v3_intermediate_ca ]
98 | # Extensions for a typical intermediate CA.
99 | subjectKeyIdentifier = hash
100 | authorityKeyIdentifier = keyid:always,issuer:always
101 | basicConstraints = critical, CA:true
102 | keyUsage = critical, digitalSignature, cRLSign, keyCertSign
103 |
104 | [ usr_cert ]
105 | # Extensions for client certificates.
106 | basicConstraints = CA:FALSE
107 | nsCertType = client, email
108 | nsComment = "OpenSSL Generated Client Certificate"
109 | subjectKeyIdentifier = hash
110 | authorityKeyIdentifier = keyid:always,issuer:always
111 | keyUsage = critical, nonRepudiation, digitalSignature, keyEncipherment
112 | extendedKeyUsage = clientAuth, emailProtection
113 |
114 | [ server_cert ]
115 | # Extensions for server certificates.
116 | basicConstraints = CA:FALSE
117 | nsCertType = server
118 | nsComment = "OpenSSL Generated Server Certificate"
119 | subjectKeyIdentifier = hash
120 | authorityKeyIdentifier = keyid:always,issuer:always
121 | keyUsage = critical, digitalSignature, keyEncipherment
122 | extendedKeyUsage = serverAuth
123 |
124 | [ crl_ext ]
125 | # Extension for CRLs.
126 | authorityKeyIdentifier=keyid:always
127 |
128 | [ ocsp ]
129 | # Extension for OCSP signing certificates.
130 | basicConstraints = CA:FALSE
131 | subjectKeyIdentifier = hash
132 | authorityKeyIdentifier = keyid,issuer
133 | keyUsage = critical, digitalSignature
134 | extendedKeyUsage = critical, OCSPSigning
--------------------------------------------------------------------------------