├── .gitignore ├── INSTALL-1.md ├── INSTALL-2.md ├── INSTALL-3.md ├── LICENSE ├── README.md ├── UNINSTALL.md └── artifacts ├── factory-agent ├── .env_template ├── Dockerfile ├── Plugins │ └── DataAnalysis │ │ └── KustoQL │ │ ├── config.json │ │ └── skprompt.txt ├── frontend.py ├── frontend_config.yml ├── images │ └── smart-factory.jpeg ├── requirements.txt └── sample_questions.json ├── media ├── UNS.png ├── ansible-prov-cloud-1.png ├── ansible-prov-cloud-2.png ├── architecture-overview.png ├── azure-deployed-1-1.png ├── azure-deployed-1.png ├── azure-deployed-2-2.png ├── azure-deployed-2.png ├── azure-deployed-3.png ├── cloud-uninstall.png ├── demo-video.gif ├── edge-deployed-1.png ├── edge-deployed-2.png ├── edge-deployed-3.png ├── evh-messages.png ├── fabric-home.png ├── fabric-tables.png ├── fabric_assets-1.png ├── fabric_assets-2.png ├── fabric_eventstream-1-1.png ├── fabric_eventstream-1-2.png ├── fabric_eventstream-1-3.png ├── fabric_eventstream-1.png ├── fabric_eventstream-2.png ├── fabric_operators-1.png ├── fabric_operators-2.png ├── fabric_products-1.png ├── fabric_products-2.png ├── factory-agent-communication-flow.png ├── factory-agent-prompt.png ├── factory-assistant-ui.png ├── key-components.png ├── mqttui.png └── simulation.png ├── mqtt-data-simulator ├── Dockerfile ├── README.md ├── config.json ├── requirements.txt └── simulator.py └── templates ├── azure-iot-operations └── dataflows │ ├── bronze-to-silver.yaml │ ├── dss │ ├── dss_set │ ├── operators.json │ └── products.json │ ├── json-after-enrichment.json │ ├── json-before-enrichment.json │ └── silver-to-cloud.yaml ├── deploy ├── 1_cloud-provision.yaml ├── 2_edge-install_aio.yaml ├── 3_edge-deploy_demo_components.yaml ├── azure-vm.json └── variables_template.yaml ├── fabric └── reference-datasets │ ├── assets.csv │ ├── operators.csv │ └── products.csv └── k3s ├── flux ├── mqtt │ └── kustomization.yaml └── simulator │ └── kustomization.yaml └── pods ├── mqtt-client └── pod.yaml ├── opc-plc └── 
opc-plc-deployment.yaml └── simulator ├── configuration.yaml └── deployment.yaml /.gitignore: -------------------------------------------------------------------------------- 1 | **/.env 2 | **/variables.yaml 3 | **/__pycache__/ 4 | vale.ini 5 | styles/ -------------------------------------------------------------------------------- /INSTALL-1.md: -------------------------------------------------------------------------------- 1 | # Part 1 - Provision resources (Cloud & Edge) 2 | 3 | ## Prepare and provision Cloud Platform 4 | You can choose between 2 options: 5 | - [Option 1](#option-1---automated-installation) : automated installation with `Ansible` (Infra as Code) 6 | - [Option 2](#option-2---manual-installation) : manual installation using Azure CLI 7 | 8 | ### Option 1 - Automated installation 9 | - Download the file [`1_cloud-provision.yaml`](./artifacts/templates/deploy/1_cloud-provision.yaml) 10 | - Download the file [`variables_template.yaml`](./artifacts/templates/deploy/variables_template.yaml) and rename it to `variables.yaml` 11 | - Define variables in file `variables.yaml` to create Azure resources: 12 | ```bash 13 | SUBSCRIPTION_ID: "" # Id of your Azure subscription 14 | LOCATION: "" # Location (Azure region) where to create resources 15 | RESOURCE_GROUP: "" # Name of the Resource Group 16 | KEYVAULT_NAME: "" # Name of the Key Vault 17 | STORAGEACCOUNT_NAME: "" # Name of the Storage Account. Length: 3-24. Valid Characters: lowercase letters and numbers. 18 | AIO_SERVICE_PRINCIPAL: "" # Name of the Service Principal (service account) to manage Azure from the Edge Cluster, using Azure CLI (command-line interface) 19 | AIO_MANAGED_IDENTITY_SECRETS: "" # Name of the Managed Identity for Azure IoT Operations secrets 20 | AIO_MANAGED_IDENTITY_COMPONENTS: "" # Name of the Managed Identity for Azure IoT Operations components 21 | AIO_SCHEMA_REGISTRY_NAMESPACE: "" # Name of the Schema Registry. Valid Characters: lowercase letters and numbers. 
22 | AIO_CLUSTER_NAME: "" # Name of the Azure IoT Operations Cluster you want to deploy 23 | EVENTHUB_NAMESPACE: "" # Name of the Event Hub Namespace 24 | EVENTHUB_NAME: "" # Name of the Event Hub inside the Event Hub Namespace 25 | FACTORY_AGENT_SERVICE_PRINCIPAL: "" # Name of the Service Principal (service account) for the Factory Agent 26 | AZURE_OPENAI_NAME: "" # Name of the Azure Open AI service 27 | ``` 28 | - Open a browser and navigate to the [Azure Portal](https://portal.azure.com/) 29 | - Use the [Azure Cloud Shell (**Bash**)](https://learn.microsoft.com/en-us/azure/cloud-shell/get-started/ephemeral?tabs=azurecli#start-cloud-shell) 30 | - Once the variables defined in file `variables.yaml`, upload the files `variables.yaml` and `1_cloud-provision.yaml` via `Manage files` > `Upload`. 31 | - Execute the playbook in Azure Cloud Shell to provision Azure Cloud resources 32 | ```bash 33 | ansible-playbook 1_cloud-provision.yaml 34 | ``` 35 | - You should see the following when the playbook has finished successfully: 36 | ![ansible-prov-cloud-1](./artifacts/media/ansible-prov-cloud-1.png "ansible-prov-cloud-1") 37 | ![ansible-prov-cloud-2](./artifacts/media/ansible-prov-cloud-2.png "ansible-prov-cloud-2") 38 | - Now, open the `variables.yaml` file. It should contain additional information at the end (BEGIN/END ANSIBLE MANAGED BLOCK): 39 | ```bash 40 | # BEGIN ANSIBLE MANAGED BLOCK 41 | AIO_SP_APPID: "" 42 | AIO_SP_SECRET: "" 43 | AIO_SCHEMA_REGISTRY_ID: "" 44 | AIO_MANAGED_IDENTITY_SECRETS_ID: "" 45 | AIO_MANAGED_IDENTITY_COMPONENTS_ID: "" 46 | TENANT: "" 47 | ARC_OBJECT_ID: "" 48 | KEYVAULT_ID: "" 49 | FACTORY_AGENT_SP_APPID: "" 50 | FACTORY_AGENT_SP_SECRET: "" 51 | EVENTHUB_ID: "" 52 | EVENTHUB_KEY: "" 53 | # END ANSIBLE MANAGED BLOCK 54 | ``` 55 | - Download the file `variables.yaml` via `Manage files` > `Download` > type `variables.yaml` > `Download`. 56 | - Copy the file `variables.yaml` to your Edge Cluster. 
57 | 58 | ### Option 2 - Manual installation 59 | - Open a browser and navigate to the [Azure Portal](https://portal.azure.com/) 60 | - Use the [Azure Cloud Shell (**Bash**)](https://learn.microsoft.com/en-us/azure/cloud-shell/get-started/ephemeral?tabs=azurecli#start-cloud-shell) 61 | - Execute the following commands in Azure Cloud Shell (Bash): 62 | - Set Environment Variables for services to create in Azure: 63 | ```bash 64 | export TTYF_SUBSCRIPTION_ID="" 65 | export TTYF_LOCATION="" 66 | 67 | prefix="ttyf" 68 | random=$(tr -dc 'a-z' < /dev/urandom | fold -w 4 | head -n 1)$(date +%y%m%d) 69 | export TTYF_RESOURCE_GROUP="${prefix}-rg" 70 | export TTYF_KEYVAULT_NAME="${prefix}${random}kv" 71 | export TTYF_STORAGE_ACCOUNT_NAME="${prefix}${random}sa" 72 | export TTYF_SCHEMA_REGISTRY_NAMESPACE="${prefix}${random}srns" 73 | export TTYF_SCHEMA_REGISTRY_NAME="aio" 74 | export TTYF_AIO_CLUSTER_NAME="${prefix}${random}aiocl" 75 | export TTYF_EVENTHUB_NAMESPACE="${prefix}${random}evhns" 76 | export TTYF_EVENTHUB_NAME="aio" 77 | export TTYF_AZURE_OPENAI_NAME="${prefix}${random}aoai" 78 | export TTYF_AZURE_OPENAI_DEPLOYMENT_NAME="talk-to-your-factory" 79 | export TTYF_AIO_SERVICE_PRINCIPAL="${prefix}-aio-sp" 80 | export TTYF_AIO_MI_SECRETS="aio-secrets" 81 | export TTYF_AIO_MI_COMPONENTS="aio-components" 82 | export TTYF_FACTORY_AGENT_SERVICE_PRINCIPAL="${prefix}-agent-sp" 83 | ``` 84 | - Select Azure Subscription: 85 | ```bash 86 | az account set --subscription $TTYF_SUBSCRIPTION_ID 87 | ``` 88 | #### Azure IoT Operations prerequisites 89 | - Register required Resource Providers (execute this step only once per subscription): 90 | ```bash 91 | az provider register -n "Microsoft.ExtendedLocation" 92 | az provider register -n "Microsoft.Kubernetes" 93 | az provider register -n "Microsoft.KubernetesConfiguration" 94 | az provider register -n "Microsoft.IoTOperations" 95 | az provider register -n "Microsoft.DeviceRegistry" 96 | az provider register -n 
"Microsoft.SecretSyncController" 97 | ``` 98 | - Install Azure CLI extension for Azure IoT Operations: 99 | ```bash 100 | az extension add --upgrade --name azure-iot-ops 101 | ``` 102 | - Create a Resource Group: 103 | ```bash 104 | az group create --location $TTYF_LOCATION --resource-group $TTYF_RESOURCE_GROUP --subscription $TTYF_SUBSCRIPTION_ID 105 | ``` 106 | - Create a Managed Identity for Azure IoT Operations (components): 107 | ```bash 108 | az identity create --resource-group $TTYF_RESOURCE_GROUP --name $TTYF_AIO_MI_COMPONENTS 109 | ``` 110 | - Create a Managed Identity for Azure IoT Operations (secrets): 111 | ```bash 112 | az identity create --resource-group $TTYF_RESOURCE_GROUP --name $TTYF_AIO_MI_SECRETS 113 | ``` 114 | - Create a storage account with `hierarchical namespace enabled`: 115 | ```bash 116 | az storage account create --name $TTYF_STORAGE_ACCOUNT_NAME --resource-group $TTYF_RESOURCE_GROUP --enable-hierarchical-namespace 117 | ``` 118 | - Create a schema registry that connects to your storage account: 119 | ```bash 120 | az iot ops schema registry create --name $TTYF_SCHEMA_REGISTRY_NAME --resource-group $TTYF_RESOURCE_GROUP --registry-namespace $TTYF_SCHEMA_REGISTRY_NAMESPACE --sa-resource-id $(az storage account show --name $TTYF_STORAGE_ACCOUNT_NAME --resource-group $TTYF_RESOURCE_GROUP -o tsv --query id) 121 | ``` 122 | - Create a Key Vault: 123 | ```bash 124 | az keyvault create --enable-rbac-authorization false --name $TTYF_KEYVAULT_NAME --resource-group $TTYF_RESOURCE_GROUP 125 | ``` 126 | - Assign 'Key Vault Secrets Officer' role to Managed Identity for Azure IoT Operations (secrets): 127 | ```bash 128 | az role assignment create --role "Key Vault Secrets Officer" --assignee $(az identity show --name $TTYF_AIO_MI_SECRETS --resource-group $TTYF_RESOURCE_GROUP --query principalId -o tsv) --scope $(az keyvault show --name $TTYF_KEYVAULT_NAME --resource-group $TTYF_RESOURCE_GROUP --query id -o tsv) 129 | ``` 130 | #### Data Streaming 
Ingestion prerequisites 131 | - Create an Event Hub namespace: 132 | ```bash 133 | az eventhubs namespace create --name $TTYF_EVENTHUB_NAMESPACE --resource-group $TTYF_RESOURCE_GROUP --location $TTYF_LOCATION 134 | ``` 135 | - Create an Event Hub: 136 | ```bash 137 | az eventhubs eventhub create --name $TTYF_EVENTHUB_NAME --resource-group $TTYF_RESOURCE_GROUP --namespace-name $TTYF_EVENTHUB_NAMESPACE 138 | ``` 139 | - Create an Event Hub Consumer Group: 140 | ```bash 141 | az eventhubs eventhub consumer-group create --consumer-group-name "Fabric" --namespace-name $TTYF_EVENTHUB_NAMESPACE --eventhub-name $TTYF_EVENTHUB_NAME --resource-group $TTYF_RESOURCE_GROUP 142 | ``` 143 | - Retrieve the Event Hub Connection String and create 2 variables: 144 | ```bash 145 | EVENTHUB_KEY_CREATE=$(az eventhubs namespace authorization-rule create --resource-group $TTYF_RESOURCE_GROUP --namespace-name $TTYF_EVENTHUB_NAMESPACE --name Listen --rights Listen) 146 | EVENTHUB_KEY_INFO=$(az eventhubs namespace authorization-rule keys list --resource-group $TTYF_RESOURCE_GROUP --namespace-name $TTYF_EVENTHUB_NAMESPACE --name Listen) 147 | export TTYF_EVENTHUB_KEY=$(echo $EVENTHUB_KEY_INFO | jq -r .primaryKey) 148 | ``` 149 | #### Factory Agent prerequisites 150 | - Create an Azure OpenAI resource: 151 | ```bash 152 | az cognitiveservices account create --name $TTYF_AZURE_OPENAI_NAME --resource-group $TTYF_RESOURCE_GROUP --location "swedencentral" --kind "OpenAI" --sku "S0" --subscription $TTYF_SUBSCRIPTION_ID 153 | ``` 154 | - Deploy LLM in Azure OpenAI: 155 | ```bash 156 | az cognitiveservices account deployment create --resource-group $TTYF_RESOURCE_GROUP --name $TTYF_AZURE_OPENAI_NAME --deployment-name $TTYF_AZURE_OPENAI_DEPLOYMENT_NAME --model-name "gpt-4o-mini" --model-version "2024-07-18" --model-format "OpenAI" --sku-capacity "250" --sku-name "GlobalStandard" 157 | ``` 158 | - Create a service principal (service account) for the Factory Assistant: 159 | ```bash 160 | 
SPN_Factory_Agent=$(az ad sp create-for-rbac --name $TTYF_FACTORY_AGENT_SERVICE_PRINCIPAL) 161 | export TTYF_FACTORY_AGENT_SP_APPID=$(echo $SPN_Factory_Agent | jq -r .appId) 162 | export TTYF_FACTORY_AGENT_SP_SECRET=$(echo $SPN_Factory_Agent | jq -r .password) 163 | ``` 164 | #### Edge Gateway prerequisites 165 | - Create a service principal (service account) to manage Azure from the Edge Gateway running Azure IoT Operations: 166 | ```bash 167 | SPN_Edge=$(az ad sp create-for-rbac --name $TTYF_AIO_SERVICE_PRINCIPAL --role Contributor --scopes /subscriptions/$TTYF_SUBSCRIPTION_ID/resourceGroups/$TTYF_RESOURCE_GROUP) 168 | export TTYF_AIO_SP_APPID=$(echo $SPN_Edge | jq -r .appId) 169 | export TTYF_AIO_SP_SECRET=$(echo $SPN_Edge | jq -r .password) 170 | export TTYF_TENANT=$(echo $SPN_Edge | jq -r .tenant) 171 | ``` 172 | - Assign role to the service principal `AIO_SP_APPID` 173 | ```bash 174 | az role assignment create --assignee $TTYF_AIO_SP_APPID --role "Role Based Access Control Administrator" --scope subscriptions/$TTYF_SUBSCRIPTION_ID/resourceGroups/$TTYF_RESOURCE_GROUP 175 | ``` 176 | - Get `objectId` from `Microsoft Entra ID` for Azure Arc application and create 1 variable: 177 | ```bash 178 | export TTYF_ARC_OBJECT_ID=$(az ad sp show --id bc313c14-388c-4e7d-a58e-70017303ee3b --query id --output tsv) 179 | ``` 180 | 181 | #### Display the variables you created and keep a note of them for future use 182 | ```bash 183 | printenv | grep TTYF_ 184 | ``` 185 | 186 | #### Resources after provisioning 187 | You should now see the following resources in Azure (names may vary depending on the variables you defined): 188 | - Resource Group 189 | ![azure-deployed-1](./artifacts/media/azure-deployed-1.png "azure-deployed-1") 190 | - Entra ID 191 | ![azure-deployed-1-1](./artifacts/media/azure-deployed-1-1.png "azure-deployed-1-1") 192 | 193 | ## Prepare and provision Edge Cluster 194 | 195 | - Hardware requirements 196 | - **Resources**: 197 | - CPU: `4 vCPU` 198 | - 
Memory: `16GB` 199 | - Storage: `30GB` 200 | 201 | - **Operating System**: the solution requires a Linux-based system, specifically a VM or physical machine running `Linux Ubuntu 24.04`. This system will perform as an Edge Cluster, handling queries directly from the production line and interfacing with other operational systems. 202 | 203 | ### Option A (Virtual Machine in Azure Cloud) 204 | - If you want to use a Virtual Machine in Azure, you can deploy it using the Deploy button below: 205 | [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fchriscrcodes%2Fsmart-factory%2Frefs%2Fheads%2Fmain%2Fartifacts%2Ftemplates%2Fdeploy%2Fazure-vm.json) 206 | ![azure-deployed-2](./artifacts/media/azure-deployed-2.png "azure-deployed-2") 207 | - Fill the required information and click `Review + create` > `Create` 208 | > **Note**: `Standard_D4s_v3` is the recommended size for the Azure VM. 209 | - You should now see the following new resources in your Azure Resource Group (names may vary depending on the variables you defined): 210 | ![azure-deployed-2-2](./artifacts/media/azure-deployed-2-2.png "azure-deployed-2-2") 211 | 212 | ### Option B (your own Industrial PC or Virtual Machine) 213 | - Install `Linux Ubuntu 24.04` 214 | 215 | You can choose between 2 options: 216 | - [Option 1](#option-1---automated-installation-1) : automated installation with `Ansible` (Infra as Code) 217 | - [Option 2](#option-2---manual-installation-1) : manual installation using Azure CLI 218 | 219 | ### Option 1 - Automated installation 220 | - Copy the file `variables.yaml` to your Edge Cluster (in your home user directory) 221 | - Login and execute the following commands on your Edge Cluster 222 | - Install `Ansible`: 223 | ```bash 224 | sudo apt update && sudo apt install ansible -y 225 | ``` 226 | - Execute the playbook to install Azure IoT Operations to your Edge Cluster 227 | ```bash 228 | 
curl -O https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/deploy/2_edge-install_aio.yaml 229 | ansible-playbook 2_edge-install_aio.yaml 230 | ``` 231 | ![edge-deployed-1](./artifacts/media/edge-deployed-1.png "edge-deployed-1") 232 | ![edge-deployed-2](./artifacts/media/edge-deployed-2.png "edge-deployed-2") 233 | 234 | ### Option 2 - Manual installation 235 | - Login and execute the following commands on your Ubuntu Machine 236 | - Retrieve the following environment variables you noted earlier in [Cloud Part](#display-the-variables-you-created-and-keep-a-note-of-them-for-future-use) (result of `printenv` command), and paste them in the terminal (example below): 237 | ```bash 238 | TTYF_SCHEMA_REGISTRY_NAMESPACE=**** 239 | TTYF_FACTORY_AGENT_SERVICE_PRINCIPAL=**** 240 | TTYF_ARC_OBJECT_ID=d**** 241 | TTYF_SCHEMA_REGISTRY_NAME=**** 242 | TTYF_AIO_MI_COMPONENTS=**** 243 | ... 244 | ``` 245 | - Install `curl` and `nano`: 246 | ```bash 247 | sudo apt update 248 | sudo apt install curl nano -y 249 | ``` 250 | - Install K3s 251 | - Run the `K3s installation script`: 252 | ```bash 253 | curl -sfL https://get.k3s.io | sh - 254 | ``` 255 | - Create a `K3s configuration` file in `.kube/config`: 256 | ```bash 257 | mkdir ~/.kube 258 | sudo KUBECONFIG=~/.kube/config:/etc/rancher/k3s/k3s.yaml kubectl config view --flatten > ~/.kube/merged 259 | mv ~/.kube/merged ~/.kube/config 260 | chmod 0600 ~/.kube/config 261 | export KUBECONFIG=~/.kube/config 262 | kubectl config use-context default 263 | sudo chmod 644 /etc/rancher/k3s/k3s.yaml 264 | ``` 265 | - Increase user watch/instance limits: 266 | ```bash 267 | echo fs.inotify.max_user_instances=8192 | sudo tee -a /etc/sysctl.conf 268 | echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf 269 | sudo sysctl -p 270 | ``` 271 | - Increase file descriptor limit: 272 | ```bash 273 | echo fs.file-max = 100000 | sudo tee -a /etc/sysctl.conf 274 | sudo sysctl -p 275 | ``` 276 | - 
Check K3s installation 277 | ```bash 278 | kubectl get node 279 | ``` 280 | - Install Azure prerequisites 281 | - Install `Azure CLI`: 282 | ```bash 283 | curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash 284 | ``` 285 | - Install `Azure Arc extension`: 286 | ```bash 287 | az extension add --upgrade --name connectedk8s 288 | ``` 289 | - Install `Azure IoT Operations extension`: 290 | ```bash 291 | az extension add --upgrade --name azure-iot-ops 292 | ``` 293 | - Validate Azure IoT Operations pre-deployment checks 294 | - Before the deployment, use `az iot ops check` to execute IoT Operations pre-deployment checks. 295 | ```bash 296 | az iot ops check 297 | ``` 298 | - If everything is OK, you can continue with the deployment. If not, please check the [Azure IoT Operations documentation](https://learn.microsoft.com/en-us/azure/iot-operations/deploy-iot-ops/howto-prepare-cluster?tabs=ubuntu) for more information. 299 | - Install Azure IoT Operations 300 | - Connect to Azure using the service principal created in [Part 1 - Edge prerequisites](#edge-gateway-prerequisites) 301 | ```bash 302 | az login --service-principal --username $TTYF_AIO_SP_APPID --password $TTYF_AIO_SP_SECRET --tenant $TTYF_TENANT 303 | ``` 304 | - Select Azure Subscription: 305 | ```bash 306 | az account set --subscription $TTYF_SUBSCRIPTION_ID 307 | ``` 308 | - Connect Kubernetes Cluster to Azure via Azure Arc: 309 | ```bash 310 | az connectedk8s connect --name $TTYF_AIO_CLUSTER_NAME --location $TTYF_LOCATION --resource-group $TTYF_RESOURCE_GROUP --subscription $TTYF_SUBSCRIPTION_ID --enable-oidc-issuer --enable-workload-identity --disable-auto-upgrade 311 | ``` 312 | - Get the cluster's issuer URL: 313 | ```bash 314 | OIDC_ISSUER_PROFILE=$(az connectedk8s show --resource-group $TTYF_RESOURCE_GROUP --name $TTYF_AIO_CLUSTER_NAME --query oidcIssuerProfile.issuerUrl --output tsv) 315 | sudo tee -a /etc/rancher/k3s/config.yaml < **Note**: confirm post deployment checks are green. 
344 | 345 | ```bash 346 | az iot ops check 347 | ``` 348 | 349 | ### Resources after provisioning 350 | - You should now see the following new resources in your Azure Resource Group (names may vary depending on the variables you defined): 351 | ![azure-deployed-3](./artifacts/media/azure-deployed-3.png "azure-deployed-3") 352 | 353 | ## Enable Data Streaming Ingestion 354 | - Azure - Authorize the cluster to connect to the Event Hub 355 | - Locate the Azure Event Hub name space you created in [Azure Portal](https://portal.azure.com/) 356 | - `Access Control (IAM)` > `Add` > `Add role assignment` 357 | - `Azure Event Hubs Data Sender` > `Next` 358 | - Assign access to `User, group, or service principal` 359 | - `Select Members` > type `azure-iot-operations-` to locate the `azure-iot-operations` extension 360 | (For example: `/subscriptions/xxx/resourceGroups/xxx/providers/Microsoft.Kubernetes/connectedClusters/xxx/providers/Microsoft.KubernetesConfiguration/extensions/azure-iot-operations-xxx`) 361 | 362 | ## Azure IoT Operations - Create Data flows 363 | - Download the [Distributed State Store](https://learn.microsoft.com/en-us/azure/iot-operations/create-edge-apps/concept-about-state-store-protocol) tool 364 | ```bash 365 | curl -O https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/dss/dss_set 366 | ``` 367 | - Set the file as executable 368 | ```bash 369 | chmod +x ./dss_set 370 | ``` 371 | - Download the Operators Dataset 372 | ```bash 373 | curl -O https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/dss/operators.json 374 | ``` 375 | - Download the Products Dataset 376 | ```bash 377 | curl -O https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/dss/products.json 378 | ``` 379 | - Import the operators dataset in the Distributed State Store 380 | 
```bash 381 | ./dss_set --key operators --file "operators.json" --address localhost 382 | ``` 383 | - Import the products dataset in the Distributed State Store 384 | ```bash 385 | ./dss_set --key products --file "products.json" --address localhost 386 | ``` 387 | - Download the data flow 388 | ```bash 389 | curl -O https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/silver-to-cloud.yaml 390 | ``` 391 | - Modify file with the name of the event hub name space created in [Step 1](#prepare-and-provision-cloud-platform) (`EVENTHUB_NAMESPACE` variable): 392 | ```bash 393 | sed -i 's/<EVENTHUB_NAMESPACE>/'"${EVENTHUB_NAMESPACE}"'/' silver-to-cloud.yaml 394 | ``` 395 | - Modify file with the name of the event hub name created in [Step 1](#prepare-and-provision-cloud-platform) (`EVENTHUB_NAME` variable): 396 | ```bash 397 | sed -i 's/<EVENTHUB_NAME>/'"${EVENTHUB_NAME}"'/' silver-to-cloud.yaml 398 | ``` 399 | - Deploy the cloud connector 400 | ```bash 401 | kubectl apply -f silver-to-cloud.yaml 402 | ``` 403 | > **Note**: if you encounter an error "WARN[0000] Unable to read /etc/rancher/k3s/k3s.yaml", execute the following command: 404 | ``` 405 | sudo chmod 644 /etc/rancher/k3s/k3s.yaml 406 | ``` 407 | - Deploy the data flow (enrichment: bronze to silver) 408 | ```bash 409 | kubectl apply -f https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/bronze-to-silver.yaml 410 | ``` 411 | 412 | ## Deploy the MQTT Factory Data Simulator 413 | - Login and execute the following commands on your Ubuntu Machine 414 | - Factory Simulator 415 | ```bash 416 | kubectl apply -f https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/k3s/pods/simulator/configuration.yaml 417 | kubectl apply -f https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/k3s/pods/simulator/deployment.yaml 418 | ``` 419 | - Deploy MQTT 
Client 420 | ```bash 421 | kubectl apply -f https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/k3s/pods/mqtt-client/pod.yaml 422 | ``` 423 | - Connect to the container running the MQTT client 424 | ```bash 425 | kubectl exec --stdin --tty mqtt-client -n azure-iot-operations -- sh 426 | ``` 427 | - From within the container, launch the MQTT client: 428 | ```bash 429 | mqttui --broker mqtt://aio-broker-insecure:1883 --insecure 430 | ``` 431 | - Confirm if the 2 following topics are present: 432 | - `LightningCars` (data coming from the Factory MQTT Simulator) 433 | - `Silver` (data coming from Azure IoT Operations 'bronze to silver' Data Flow) 434 | 435 | ![MQTT Broker Client](./artifacts/media/mqttui.png "MQTT Broker Client") 436 | 437 | ## Confirm Data is flowing from Edge (Azure IoT Operations) to Cloud (Azure Event Hub) 438 | - Locate the Azure Event Hub Namespace you created in [Azure Portal](https://portal.azure.com/) 439 | - Data Explorer > select the event hub you created in [Step 1](#prepare-and-provision-cloud-platform) (`EVENTHUB_NAME` variable) 440 | - Click on `View events` and select a message on the right to confirm data flow is operational 441 | ![evh-messages](./artifacts/media/evh-messages.png "evh-messages") 442 | 443 | - ✅ **You can now continue to** > [Part 2 - Configure the solution in Microsoft Fabric](./INSTALL-2.md) -------------------------------------------------------------------------------- /INSTALL-2.md: -------------------------------------------------------------------------------- 1 | # Part 2 - Configure the solution in Microsoft Fabric 2 | 3 | ## Start a new Microsoft Fabric trial 4 | 5 | - Open the [Fabric homepage](https://app.fabric.microsoft.com/home) and select the Account manager. 6 | - In the Account manager, select Free trial. If you don't see Free trial or Start trial or a Trial status, trials aren't allowed for your tenant. 
7 | 8 | ## Create database 9 | - Select 'Real-Time Intelligence' from the [Fabric homepage](https://app.powerbi.com/home?experience=kusto). 10 | ![fabric-home](./artifacts/media/fabric-home.png "fabric-home") 11 | - Click on `Workspaces` > `New workspace` > type `Smart Factory` and click `Apply` 12 | - Click `New` > `Eventhouse` > type `AIO` and click `Create` 13 | 14 | ## Create tables with reference data 15 | - Click on `Workspaces` > `Smart Factory` 16 | - Select the database `AIO` (type: `KQL Database`) 17 | - Create the table for `operators` dataset 18 | 1. Click on `Get data` > `Local file` > `New table` > type `operators` 19 | 2. Upload the file [operators.csv](./artifacts/templates/fabric/reference-datasets/operators.csv) > `Next` 20 | ![fabric-upload](./artifacts/media/fabric_operators-1.png "fabric-upload") 21 | 3. Click on `First row is column header` > `Finish`, wait for the ingestion (status "Successfully ingested") and click `Close` 22 | ![fabric-columns](./artifacts/media/fabric_operators-2.png "fabric-columns") 23 | - Create the table for `assets` dataset 24 | 1. Click on `Get data` > `Local file` > `New table` > type `assets` 25 | 2. Upload the file [assets.csv](./artifacts/templates/fabric/reference-datasets/assets.csv) > `Next` 26 | ![fabric-upload2](./artifacts/media/fabric_assets-1.png "fabric-upload2") 27 | 3. Click on `First row is column header` > `Finish`, wait for the ingestion (status "Successfully ingested") and click `Close` 28 | ![fabric-columns2](./artifacts/media/fabric_assets-2.png "fabric-columns2") 29 | - Create the table for `products` dataset 30 | 1. Click on `Get data` > `Local file` > `New table` > type `products` 31 | 2. Upload the file [products.csv](./artifacts/templates/fabric/reference-datasets/products.csv) > `Next` 32 | ![fabric-upload3](./artifacts/media/fabric_products-1.png "fabric-upload3") 33 | 3. 
Click on `First row is column header` > `Finish`, wait for the ingestion (status "Successfully ingested") and click `Close` 34 | ![fabric-columns3](./artifacts/media/fabric_products-2.png "fabric-columns3") 35 | **Note**: the reference datasets will enable data enrichment in the Cloud with datasets in the Cloud (operators, assets and products manufactured). 36 | 37 | ## Create table for silver data coming from Azure IoT Operations 38 | - Select the query set `AIO_queryset` 39 | - Add the following query: 40 | ``` 41 | .create table aio_silver ( 42 | Area: string, 43 | Cell: string, 44 | Downtime: double, 45 | EmployeeId: string, 46 | EnergyConsumption: double, 47 | Enterprise: string, 48 | GoodPartsCount: int64, 49 | IdealCycleTime: int64, 50 | Latitude: double, 51 | Line: string, 52 | Longitude: double, 53 | OperatingTime: int64, 54 | PlannedProductionTime: int64, 55 | ProductId: string, 56 | Shift: int64, 57 | ShiftHours: string, 58 | Site: string, 59 | Temperature: double, 60 | Timestamp: datetime, 61 | TotalPartsCount: int64, 62 | UNS: string, 63 | EventProcessedUtcTime: datetime, 64 | PartitionId: int64, 65 | EventEnqueuedUtcTime: datetime 66 | ) 67 | ``` 68 | - Select the query portion and click `Run` to create the table `aio_silver` 69 | 70 | ## Create update function to enrich data stream with reference datasets 71 | - Add the following query: 72 | ``` 73 | .create function with(folder = 'UpdatePolicyFunctions') EnrichWithReferenceData() { 74 | ["aio_silver"] 75 | | join kind=inner ['assets'] on Cell 76 | | join kind=inner ['products'] on Cell 77 | | join kind=inner ['operators'] on EmployeeId 78 | | project Timestamp, Enterprise, Site, Area, Line, Cell, SerialNumber, MaintenanceStatus, MaintenanceDate, ProductId, ProductName, EmployeeId, Operator, OperatorPhone, OperatorEmail, PlannedProductionTime, OperatingTime, TotalPartsCount, GoodPartsCount, IdealCycleTime, Downtime, EnergyConsumption, Temperature, Shift, ShiftHours, UNS, Latitude, Longitude 79 | } 80 
| ``` 81 | - Select the query portion and click `Run` to create the function 82 | 83 | ## Create table for gold data enriched with cloud reference datasets (directory and maintenance) 84 | - Add the following query: 85 | ``` 86 | .set aio_gold <| 87 | EnrichWithReferenceData() 88 | ``` 89 | - Select the query portion and click `Run` to create the table `aio_gold` 90 | - You should now see 5 tables: 91 | ![fabric-tables](./artifacts/media/fabric-tables.png "fabric-tables") 92 | 93 | ## Disable streaming ingestion 94 | - Select the query set `AIO_queryset` 95 | - Add the following query: 96 | ``` 97 | .alter table aio_silver policy streamingingestion disable 98 | ``` 99 | - Select the query portion and click `Run` 100 | 101 | ## Activate the update policy 102 | - Add the following query: 103 | ``` 104 | .alter table aio_gold policy update 105 | @'[{ "IsEnabled": true, "Source": "aio_silver", "Query": "EnrichWithReferenceData()", "IsTransactional": false, "PropagateIngestionProperties": false}]' 106 | ``` 107 | - Select the query portion and click `Run` 108 | 109 | ## Authorize the Factory Agent to query the database 110 | - Retrieve the following environment variables you defined in [Part 1 - Provision resources (Cloud & Edge)](./INSTALL-1.md) ==> file `variables.yaml`: 111 | ```bash 112 | FACTORY_AGENT_SP_APPID 113 | TENANT 114 | ``` 115 | - Add the following query (replace the placeholders with your values): 116 | ``` 117 | .add database AIO viewers ('aadapp=<FACTORY_AGENT_SP_APPID>;<TENANT>') "Gen AI Factory Agent" 118 | ``` 119 | - Select the query portion and click `Run` 120 | 121 | ## Authorize the Factory Agent to query the table 122 | - Retrieve the following environment variables you defined in [Part 1 - Provision resources (Cloud & Edge)](./INSTALL-1.md) ==> file `variables.yaml`: 123 | ```bash 124 | FACTORY_AGENT_SP_APPID 125 | TENANT 126 | ``` 127 | - Add the following query (replace the placeholders with your values): 128 | ``` 129 | .add table aio_gold admins ('aadapp=<FACTORY_AGENT_SP_APPID>;<TENANT>') "Gen AI Factory Agent" 130 | ``` 131 | - Select the query portion and click `Run` 132 | 133 | ## 
Create the event stream to ingest data from Azure Event Hub to a database in Microsoft Fabric 134 | 1. Configure event stream source 135 | - Click on `Workspaces` > `Smart Factory` 136 | - `New` > `Eventstream` > choose the name `aio_silver` and click `Create` 137 | - Click on `Add source` > `External sources` > `Azure Event Hubs` > `Connect` 138 | - Create new connection 139 | - Retrieve variables created in [Part 1 - Provision resources (Cloud & Edge)](./INSTALL-1.md) ==> file `variables.yaml` 140 | - `Event Hub namespace` > `EVENTHUB_NAMESPACE` variable 141 | - `Event Hub` > `EVENTHUB_NAME` ` 142 | - Choose a connection name 143 | - `Shared Access Key Name` > `Listen` 144 | - `Shared Access Key` > `EVENTHUB_KEY` variable 145 | - Check that the connection name is correct 146 | - Tick the box `Test connection` and click `Connect` 147 | - `Consumer group` > type `Fabric` 148 | - `Data format` > select `Json` 149 | - `Next` > `Add` 150 | 151 | 2. Configure event stream data transformation 152 | - Click on `Transform events` > `Manage fields` 153 | - Connect event stream node to `ManageFields` node 154 | - Click on the pencil > `Add all fields` 155 | - Set the field `EmployeeId` to `String` (click on the three dots) > `Change type` > `Yes` 156 | ![fabric-eventstream-1-1](./artifacts/media/fabric_eventstream-1-1.png "fabric-eventstream-1-1") 157 | - Set the field `ProductId` to `String` > `Change type` > `Yes` 158 | ![fabric-eventstream-1-2](./artifacts/media/fabric_eventstream-1-2.png "fabric-eventstream-1-2") 159 | - Set the field `Timestamp` to `DateTime` > `Change type` > `Yes` 160 | ![fabric-eventstream-1-3](./artifacts/media/fabric_eventstream-1-3.png "fabric-eventstream-1-3") 161 | 162 | 3. 
Configure event stream destination 163 | - Click on `Add destination` > `Eventhouse` 164 | - Tick the box `Event processing before ingestion` 165 | - Choose a `Destination name` 166 | - `Workspace` > select `Smart Factory` 167 | - `Eventhouse` > select `AIO` 168 | - `KQL Database` > select the database `AIO` 169 | - `KQL Destination table` > select `aio_silver` 170 | - `Input data format` > `Json` 171 | - Tick the box `Activate streaming after adding data source` > `Save` 172 | - Connect `ManageFields` node to Destination node 173 | - Click `Save` 174 | ![fabric-eventstream-2](./artifacts/media/fabric_eventstream-2.png "fabric-eventstream-2") 175 | - Click `Publish` to start the Eventstream 176 | 177 | - ✅ **You can now continue to** > [Part 3 - Deploy and use the Generative AI Factory Agent](./INSTALL-3.md) -------------------------------------------------------------------------------- /INSTALL-3.md: -------------------------------------------------------------------------------- 1 | # Part 3 - Deploy and use the Generative AI Factory Agent 2 | 3 | ## Get Large Language Model (LLM) information from Azure AI Foundry Portal 4 | - Login to [Azure AI Foundry Portal](https://ai.azure.com/) 5 | - Select your deployment in `Shared resources` > `Deployments` > `talk-to-your-factory` 6 | - Copy the following information in `Endpoint` section: `Target URI` and `Key`. We will need them in the next section. 7 | 8 | ## Create an environment variable file 9 | - Rename the file [`.env_template`](./artifacts/factory-agent/.env_template) to `.env` 10 | - Retrieve the environment following variables you defined in [Part 1 - Provision resources (Cloud & Edge)](./INSTALL-1.md) ==> file `variables.yaml`: 11 | ```bash 12 | FACTORY_AGENT_SP_APPID 13 | FACTORY_AGENT_SP_SECRET 14 | TENANT 15 | ``` 16 | - Select 'Real-Time Intelligence' from the [Fabric homepage](https://app.powerbi.com/home?experience=kusto). 
17 | ![fabric-home](./artifacts/media/fabric-home.png "fabric-home") 18 | - Click on `Workspaces` > `Smart Factory` 19 | - Select the database `AIO` (type: `KQL Database`) 20 | - Retrieve the Fabric endpoint from `Overview` > `Query URI` > click `Copy URI` 21 | - Modify environment variables in `.env` file 22 | ```bash 23 | AZURE_OPENAI_ENDPOINT = < Azure AI Foundry Portal => Target URI > 24 | AZURE_OPENAI_API_KEY = < Azure AI Foundry Portal => Key > 25 | AZURE_OPENAI_DEPLOYMENT_NAME = "talk-to-your-factory" 26 | AZURE_OPENAI_MODEL_NAME = "gpt-4o-mini" 27 | AZURE_OPENAI_DEPLOYMENT_VERSION = "2024-07-18" 28 | 29 | AZURE_AD_TENANT_ID = < variables.yaml => TENANT > 30 | KUSTO_CLUSTER = < Microsoft Fabric => Query URI > 31 | KUSTO_MANAGED_IDENTITY_APP_ID = < variables.yaml => FACTORY_AGENT_SP_APPID > 32 | KUSTO_MANAGED_IDENTITY_SECRET = < variables.yaml => FACTORY_AGENT_SP_SECRET > 33 | KUSTO_DATABASE_NAME = "AIO" 34 | KUSTO_TABLE_NAME = "aio_gold" 35 | ``` 36 | 37 | ## Start the Factory Agent Application 38 | - Option 1 (from command line) 39 | - Start a terminal from the [directory](./artifacts/factory-agent/) 40 | - Execute the following commands: 41 | ```bash 42 | pip install -r requirements.txt 43 | streamlit run .\frontend.py 44 | ``` 45 | - Option 2 (Docker) 46 | - Start a terminal from the [directory](./artifacts/factory-agent/) 47 | - Execute the following commands: 48 | ```bash 49 | docker build . -t factory-agent:v1.0 50 | docker run -p 8501:8501 factory-agent:v1.0 51 | ``` 52 | - Launch a browser with the following URL to access the application: 53 | ``` 54 | http://localhost:8501/ 55 | ``` 56 | - You can now query the database using Natural Language 57 | > **IMPORTANT**: No actual data from the database is transmitted to the Large Language Model; only the prompt and the database schema are shared. The LLM will generate the query to be executed against the database, but it won't execute the query itself. 58 | - Some example queries are provided. 
59 | 60 | ![Factory Agent User Interface](./artifacts/media/demo-video.gif "Factory Agent User Interface") -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) [2024] [Christophe Crémon] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🗣️ Talk to your Factory 2 | 3 | ## Introduction 4 | 5 | Welcome to the "Talk to your Factory" project, a fascinating open-source initiative that allows you to control and interact with industrial equipment using natural language processing (NLP), Edge and Cloud technologies. 6 | 7 | What's about? 
The project aims to bridge the gap between humans and industrial machines by enabling people to communicate with factory equipment using simple text inputs. 8 | 9 | See how the Smart Factory leverages Generative AI to optimize its operations! 10 | 11 | ## Technology 12 | 13 | 🏭 Real-time ingestion and processing of operational data (OT): operators, manufactured products, and machine maintenance schedules. 14 | 🤖 Data processing: Edge and Cloud, with a Semantic Kernel & Generative AI model to power the Factory Agent, for smarter interactions. 15 | 16 | ### Key features and benefits 17 | 18 | - **Data Processing**: Data structure following a **Medallion Architecture**, with the goal of incrementally and progressively improving the structure and quality of data as it flows through each layer of the architecture. 19 | From `Bronze` (Edge: MQTT Data Simulator) ⇒ `Silver` (Edge: Azure IoT Operations) ⇒ `Gold` (Cloud: Microsoft Fabric) layer tables. 20 | 21 | - **Natural Language Processing (NLP)**: a Factory Agent, enhanced by Generative AI, empowers operators, so they can ask complex questions about machine operations, staff, production quality, as if they were speaking to a human expert in the Factory. 22 | 23 | ## Architecture 24 | 25 | ### Solution architecture overview 26 | 27 | ![Architecture Diagram](./artifacts/media/architecture-overview.png "Solution Overview") 28 | 29 | ### Factory simulation 30 | 31 | ![Factory Simulation](./artifacts/media/simulation.png "Factory Simulation") 32 | 33 | ### Key components 34 | 35 | ![Data Diagram](./artifacts/media/key-components.png "Data Diagram") 36 | 37 | 1. [**Factory Simulator**](./artifacts/mqtt-data-simulator/README.md) 38 | Simulates data coming from several factories: Berlin, Austin, Buffalo, Shanghai. 
39 | Factory simulator is publishing data to an Message Queuing Telemetry Transport (MQTT) broker topic based on the international standard from the International Society of Automation known as 'ISA-95' with the following format: Enterprise/Site/Area/Line/Cell. 40 | Industrial machines involved in the process are 'Cells.' 41 | 42 | > Messages are published following the **UNS (Unified Namespace) Architecture**. 43 | The UNS is a method of organizing and structuring data in a way that makes it accessible and meaningful across an entire enterprise. 44 | ![UNS](./artifacts/media/UNS.png "UNS") 45 | 46 | 2. [**Azure IoT Operations**](https://learn.microsoft.com/en-us/azure/iot-operations/overview-iot-operations) 47 | Processes data at Edge: normalize, contextualize, enrich with Edge reference datasets (Operators and Products). 48 | 49 | 3. [**Azure Event Hub**](https://learn.microsoft.com/en-us/azure/event-hubs/event-hubs-about) 50 | Data ingestion in Azure. 51 | 52 | 4. [**Microsoft Fabric Real-Time Intelligence**](https://learn.microsoft.com/en-us/fabric/real-time-intelligence/overview) 53 | Processes data in Azure: materialize data as a Table, enrich with Cloud reference datasets (Operators, Assets and Products). 54 | 55 | 5. [**Generative AI Factory Agent**](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview) 56 | Introducing a custom Large Language Model (LLM) Factory Agent, based on OpenAI model 'GPT-4o', that enables natural language communication with the factory. This agent simplifies the process of retrieving information from various systems and databases. 57 | 58 | ### Communication flow 59 | 60 | ![Factory Agent Communication Flow](./artifacts/media/factory-agent-communication-flow.png "Factory Agent Communication Flow") 61 | 62 | 1. **User Prompt**: user asks a question to the Factory Agent. 63 | The graphical user interface is based on the open-source framework [`Streamlit`](https://streamlit.io/). 64 | 2. 
**Custom Large Language Model (LLM)**: analyzes the prompt and generates the corresponding query to be executed against the database in Microsoft Fabric. 65 | 3. [**Semantic Kernel**](https://aka.ms/semantic-kernel): executes the query and returns results (Python application). 66 | 67 | #### Creating complex queries from natural language prompt - Example 68 | ![Factory Agent Prompt](./artifacts/media/factory-agent-prompt.png "Factory Agent Prompt") 69 | 70 | 1. **Prompt**: _"Determine the yield percentage for each Site this month by comparing the total units produced to the number of good units produced."_ 71 | 2. **Generative AI Model**: analyzes the prompt and generates the corresponding query to be executed against the database. 72 | 73 | > **IMPORTANT**: No actual data from the database is transmitted to the Large Language Model; only the prompt and the database schema are shared. The LLM will generate the query to be executed against the database, but it won't execute the query itself. 74 | 75 | Example query generated in `KQL (Kusto Query Language)`: 76 | ``` 77 | aio_gold 78 | | where Timestamp >= startofmonth(now()) 79 | | summarize TotalUnitsProduced = sum(TotalPartsCount), GoodUnitsProduced = sum(GoodPartsCount) by Site 80 | | extend YieldPercentage = (todouble(GoodUnitsProduced) / TotalUnitsProduced) * 100 81 | | project Site, YieldPercentage 82 | | limit 100 83 | ``` 84 | 85 | 3. **Back-end application `(Python)`**: queries the database to retrieve results. 86 | 87 | 4. **Front-end application `(Streamlit)`**: provides the user interface. 
88 | 89 | ## Prerequisites 90 | Microsoft Documentation: [Azure IoT Operations prerequisites](https://learn.microsoft.com/en-us/azure/iot-operations/deploy-iot-ops/howto-prepare-cluster?tabs=ubuntu) 91 | 92 | ### Hardware requirements 93 | 94 | - **Resources**: 95 | - CPU: `4 vCPU` 96 | - Memory: `16GB` 97 | - Storage: `30GB` 98 | 99 | - **Operating System**: the solution requires a Linux-based system, specifically a VM or physical machine running `Linux Ubuntu 24.04`. This system will perform as an Edge server, handling queries directly from the production line and interfacing with other operational systems. 100 | 101 | ### Software requirements 102 | 103 | - [`K3s`](https://k3s.io/) Lightweight Kubernetes. Easy to install, half the memory, all in a binary of less than 100 MB. 104 | - [`python >=v3.10`](https://www.python.org/) Programming Language 105 | - [`Azure CLI`](https://learn.microsoft.com/en-us/cli/azure/) the Azure command-line interface. 106 | 107 | ### Cloud services requirements 108 | 109 | - Azure Subscription (with Contributor rights) 110 | - The solution will deploy the following resources: 111 | - Azure IoT Operations prerequisites 112 | - Resource Group 113 | - Storage Account 114 | - Schema Registry 115 | - 2 Managed Identities 116 | - 2 App Registrations (Service Principal for Edge Gateway & Factory Agent) 117 | - Data Streaming Ingestion 118 | - Event Hub 119 | - Factory Agent 120 | - Azure Open AI Service 121 | - _Optional_: Virtual Machine (if you want to test everything in Azure Cloud) 122 | - Microsoft Fabric Tenant (you can try it for free [here](https://www.microsoft.com/en-us/microsoft-fabric/getting-started?msockid=27cd43526f4e6b2a1fa857d06e486a3c)) 123 | 124 | ## Demo 125 | 126 | ![Factory Agent User Interface](./artifacts/media/demo-video.gif "Factory Agent User Interface") 127 | 128 | ## Videos 129 | 130 | - **IoT Show** hosted by `Olivier Bloch` 131 | [![Video on the IoT 
Show](https://img.youtube.com/vi/-AxWwJU_G_U/hqdefault.jpg)](https://www.youtube.com/embed/-AxWwJU_G_U) 132 | - **Azure Arc Jumpstart** hosted by `Lior Kamrat` 133 | [![Video on the Arc Jumpstart Show](https://img.youtube.com/vi/cN6urmB_7jY/hqdefault.jpg)](https://www.youtube.com/embed/cN6urmB_7jY) 134 | 135 | ## Solution build steps 136 | 137 | Deploy the solution in 3 steps! 138 | 139 | ### 1. [Provision resources (Cloud & Edge)](./INSTALL-1.md) 140 | ### 2. [Configure the solution (Microsoft Fabric)](./INSTALL-2.md) 141 | ### 3. [Deploy and use the Generative AI Factory Agent](./INSTALL-3.md) 142 | 143 | ### Additional resources 144 | - [Uninstall the solution](./UNINSTALL.md) -------------------------------------------------------------------------------- /UNINSTALL.md: -------------------------------------------------------------------------------- 1 | # Uninstall Procedure 2 | 3 | ## 1-Edge unprovisioning 4 | - From the Edge Cluster, execute the following commands: 5 | ```bash 6 | az account set --subscription $TTYF_SUBSCRIPTION_ID 7 | az iot ops delete --yes --name $TTYF_AIO_CLUSTER_NAME --resource-group $TTYF_RESOURCE_GROUP --include-deps 8 | az connectedk8s delete --yes --name $TTYF_AIO_CLUSTER_NAME --resource-group $TTYF_RESOURCE_GROUP 9 | az logout 10 | /usr/local/bin/k3s-uninstall.sh 11 | rm -r ~/.kube 12 | ``` 13 | 14 | ## 2-Cloud unprovisioning 15 | - Open a browser and navigate to the [Azure Portal](https://portal.azure.com/) 16 | - Use the [Azure Cloud Shell (**Bash**)](https://learn.microsoft.com/en-us/azure/cloud-shell/get-started/ephemeral?tabs=azurecli#start-cloud-shell) 17 | - Execute the following commands in Azure Cloud Shell (Bash): 18 | ```bash 19 | az account set --subscription $TTYF_SUBSCRIPTION_ID 20 | az group delete --resource-group $TTYF_RESOURCE_GROUP --yes 21 | az keyvault purge --no-wait --name $TTYF_KEYVAULT_NAME --location $TTYF_LOCATION 22 | az cognitiveservices account purge --name $TTYF_AZURE_OPENAI_NAME --resource-group 
$TTYF_RESOURCE_GROUP --location "swedencentral" 23 | az ad app delete --id $TTYF_FACTORY_AGENT_SP_APPID 24 | az ad app delete --id $TTYF_AIO_SP_APPID 25 | ``` -------------------------------------------------------------------------------- /artifacts/factory-agent/.env_template: -------------------------------------------------------------------------------- 1 | GLOBAL_LLM_SERVICE = "AzureOpenAI" 2 | AZURE_OPENAI_ENDPOINT = "" 3 | AZURE_OPENAI_API_KEY = "" 4 | AZURE_OPENAI_DEPLOYMENT_NAME = "" 5 | 6 | AZURE_AD_TENANT_ID = "" 7 | KUSTO_CLUSTER = "" 8 | KUSTO_MANAGED_IDENTITY_APP_ID = "" 9 | KUSTO_MANAGED_IDENTITY_SECRET = "" 10 | KUSTO_DATABASE_NAME = "" 11 | KUSTO_TABLE_NAME = "" -------------------------------------------------------------------------------- /artifacts/factory-agent/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use the official Python 3.12.3 slim image as the base image. 2 | FROM python:3.12.3-slim 3 | 4 | # Set the working directory inside the container to /app. 5 | WORKDIR /app 6 | 7 | # Copy the current directory's contents (where the Dockerfile is located) to /app inside the container. 8 | COPY . /app 9 | 10 | # Install Python dependencies specified in the requirements.txt file. 11 | RUN pip3 install -r requirements.txt 12 | 13 | # Expose port 8501, which is the port the Streamlit app will run on. 14 | EXPOSE 8501 15 | 16 | # Define a health check to verify if the Streamlit server is running by sending a request to the health endpoint. 17 | HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health 18 | 19 | # Set the entry point for the container to run Streamlit. It starts the Streamlit app by running 'frontend.py', 20 | # and binds it to port 8501, accessible on all network interfaces (0.0.0.0). 
21 | ENTRYPOINT ["streamlit", "run", "frontend.py", "--server.port=8501", "--server.address=0.0.0.0"] -------------------------------------------------------------------------------- /artifacts/factory-agent/Plugins/DataAnalysis/KustoQL/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "schema": 1, 3 | "description": "Generate a KQL query", 4 | "type": "completion", 5 | "completion": { 6 | "max_tokens": 4096, 7 | "temperature": 0, 8 | "top_p": 1, 9 | "presence_penalty": 0, 10 | "frequency_penalty": 0 11 | }, 12 | "input": { 13 | "parameters": [ 14 | { 15 | "name": "request", 16 | "description": "User request", 17 | "defaultValue": "" 18 | }, 19 | { 20 | "name": "table", 21 | "description": "Name of the Kusto Table", 22 | "defaultValue": "" 23 | }, 24 | { 25 | "name": "schema", 26 | "description": "Schema of the Kusto Table", 27 | "defaultValue": "" 28 | } 29 | ] 30 | } 31 | } -------------------------------------------------------------------------------- /artifacts/factory-agent/Plugins/DataAnalysis/KustoQL/skprompt.txt: -------------------------------------------------------------------------------- 1 | [RULES] 2 | Behave as an expert in Azure Data Explorer Kusto Query Language (KQL). 3 | Your task is to generate KQL statements based on the user's prompt, the table name, and schema provided. 4 | 5 | [CONTEXT] 6 | Table name: {{$table}} 7 | Table schema: {{$schema}} 8 | User input: {{$request}} 9 | 10 | [INSTRUCTIONS] 11 | 1. Ensure that the generated KQL query is non-destructive. Do not include any commands that delete, update, or modify data. 12 | 2. If a valid KQL query is generated, enclose it with "RESPONSE_START" and "RESPONSE_END" markers. 13 | 3. If not explicitly stated by the user, return distinct values in the query results. 14 | 4. Limit the number of query results to 100. 15 | 5. Make sure the generated KQL query is syntactically correct and compatible with the provided table name and schema. 
16 | 6. If the user's prompt is not clear or if it is not possible to generate a valid KQL query based on the provided context, respond with a message indicating that you are unable to generate a query based on the given input. -------------------------------------------------------------------------------- /artifacts/factory-agent/frontend.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import asyncio 4 | import yaml 5 | import json 6 | import pandas as pd 7 | import streamlit as st 8 | from dotenv import load_dotenv 9 | from azure.kusto.data import KustoConnectionStringBuilder 10 | from azure.kusto.data.aio import KustoClient 11 | from azure.kusto.data.helpers import dataframe_from_result_table 12 | from semantic_kernel import Kernel 13 | from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion 14 | from semantic_kernel.functions import KernelArguments 15 | from semantic_kernel.contents.chat_history import ChatHistory 16 | 17 | # Load environment variables from .env file 18 | load_dotenv() 19 | 20 | async def connect_kusto(database: str, query: str) -> str | pd.DataFrame: 21 | """Establish an asynchronous connection to the Kusto database and execute a query.""" 22 | kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication( 23 | os.getenv('KUSTO_CLUSTER'), 24 | os.getenv('KUSTO_MANAGED_IDENTITY_APP_ID'), 25 | os.getenv('KUSTO_MANAGED_IDENTITY_SECRET'), 26 | os.getenv('AZURE_AD_TENANT_ID') 27 | ) 28 | 29 | async with KustoClient(kcsb) as client: 30 | try: 31 | client_execute = await client.execute(database, query) 32 | results = client_execute.primary_results[0] 33 | return dataframe_from_result_table(results) 34 | except SyntaxError as e: 35 | return f"Syntax error in query: {str(e)}" 36 | except Exception as e: 37 | return f"Error while executing query: {str(e)}" 38 | 39 | def instantiate_kernel() -> tuple[Kernel, str]: 40 | """Instantiate the Semantic Kernel with 
the OpenAI service.""" 41 | kernel = Kernel() 42 | chat_completion = AzureChatCompletion( 43 | deployment_name=os.getenv('AZURE_OPENAI_DEPLOYMENT_NAME'), 44 | api_key=os.getenv('AZURE_OPENAI_API_KEY'), 45 | endpoint=os.getenv('AZURE_OPENAI_ENDPOINT') 46 | ) 47 | kernel.add_service(chat_completion) 48 | 49 | plugin = kernel.add_plugin( 50 | parent_directory=os.path.join(__file__, "../Plugins/"), 51 | plugin_name="DataAnalysis" 52 | ) 53 | return kernel, plugin 54 | 55 | async def agent_kusto(kernel: Kernel, plugin: str, prompt: str, database: str, table: str, schema: str, chat_history: ChatHistory) -> str | pd.DataFrame: 56 | """Generate a Kusto query using the Semantic Kernel and execute it.""" 57 | arguments = KernelArguments(request=prompt, table=table, schema=schema) 58 | chat_history.add_user_message(prompt) 59 | 60 | sk_invoke = await kernel.invoke(plugin["KustoQL"], arguments, chat_history=chat_history) 61 | cleaned_output = str(sk_invoke).replace("kusto", "").replace("```kql", "").replace("```", "") 62 | sk_response = re.search(r"RESPONSE_START\n((.|\n)*)\nRESPONSE_END", cleaned_output) 63 | 64 | if sk_response: 65 | query_statement = sk_response.group(1).strip() 66 | 67 | if "kql" not in str(sk_invoke).lower(): 68 | chat_history.add_assistant_message(query_statement) 69 | return query_statement 70 | 71 | print(f"\n[DEBUG] QUERY:\n{query_statement}\n") 72 | 73 | execute_query_kusto_db = await connect_kusto(database, query_statement) 74 | 75 | if isinstance(execute_query_kusto_db, str): 76 | return execute_query_kusto_db # Return the error message 77 | 78 | if execute_query_kusto_db.empty: 79 | return "No data found for the given query." 
80 | 81 | chat_history.add_assistant_message(execute_query_kusto_db.to_json(orient="records")) 82 | return execute_query_kusto_db # Return the DataFrame for display 83 | 84 | chat_history.add_assistant_message(cleaned_output) 85 | return cleaned_output 86 | 87 | def clear_input() -> None: 88 | """Clear the session state for chat messages and reset the application state.""" 89 | st.session_state.pop("messages", None) 90 | st.session_state.pop("chat_history", None) 91 | 92 | # Instantiate Semantic Kernel and Plugin 93 | kernel, plugin = instantiate_kernel() 94 | 95 | # Retrieve Kusto Table Schema 96 | query_schema = asyncio.run(connect_kusto( 97 | os.getenv('KUSTO_DATABASE_NAME'), 98 | f".show table {os.getenv('KUSTO_TABLE_NAME')} schema as json" 99 | )) 100 | schema = str(query_schema.Schema[0]) 101 | 102 | # Configure Streamlit application 103 | with open('./frontend_config.yml', 'r') as file: 104 | config = yaml.safe_load(file) 105 | title = config['streamlit']['title'] 106 | 107 | # Load sample questions from JSON file 108 | with open('sample_questions.json', 'r', encoding="utf-8") as file: 109 | example_questions = json.load(file) 110 | question_list = [q for q in example_questions.values()] 111 | 112 | # Set the full-screen page configuration for the app 113 | st.set_page_config(page_title=config['streamlit']['tab_title'], layout='wide') 114 | 115 | # Display logo and title 116 | st.image(config['streamlit']['logo'], width=800) 117 | st.title(title) 118 | 119 | # Initialize session state for messages if not already present 120 | if "messages" not in st.session_state: 121 | st.session_state.messages = [{"role": "assistant", "content": config['streamlit']['agent_intro_message']}] 122 | 123 | # Ensure chat history is initialized as well 124 | if "chat_history" not in st.session_state: 125 | st.session_state.chat_history = ChatHistory() 126 | 127 | # Sidebar for displaying example questions and reset button 128 | with st.sidebar: 129 | st.header("Frequently Asked 
Questions") 130 | for question in question_list: 131 | with st.expander(label=question, expanded=False): 132 | if st.button("Ask", key=question): # Unique key for each button 133 | # Check if the question has already been asked 134 | if not any(isinstance(msg["content"], str) and msg["content"] == question for msg in st.session_state.messages): 135 | st.session_state.messages.append({"role": "user", "content": question}) 136 | 137 | if st.button("Reset Chat"): 138 | clear_input() # Clear session state and reinitialize 139 | 140 | # Display chat messages in the chat window 141 | if "messages" not in st.session_state: 142 | st.session_state.messages = [{"role": "assistant", "content": config['streamlit']['agent_intro_message']}] 143 | 144 | for message in st.session_state.messages: 145 | with st.chat_message(message["role"]): 146 | content = message["content"] 147 | if isinstance(content, pd.DataFrame): 148 | st.dataframe(content.style.format(na_rep='NA')) 149 | else: 150 | st.markdown(f"**{message['role'].capitalize()}:** {content.strip()}") # Enhanced formatting for readability 151 | 152 | # User-provided input for additional prompts 153 | if prompt := st.chat_input(): 154 | st.session_state.messages.append({"role": "user", "content": prompt}) 155 | with st.chat_message("user"): 156 | st.markdown(f"**User:** {prompt.strip()}") # Enhanced formatting 157 | 158 | last_msg = st.session_state.messages[-1] 159 | 160 | # Generate a new response if the last message is not from the agent 161 | if last_msg["role"] != "assistant": 162 | with st.chat_message("assistant"): 163 | with st.spinner('Thinking...'): 164 | response = asyncio.run(agent_kusto( 165 | kernel, plugin, last_msg["content"], 166 | os.getenv('KUSTO_DATABASE_NAME'), os.getenv('KUSTO_TABLE_NAME'), schema, 167 | st.session_state.chat_history # Pass chat history to the agent 168 | )) 169 | if isinstance(response, pd.DataFrame): 170 | st.dataframe(response.style.format(na_rep='NA')) # Ensure DataFrame is displayed 
correctly 171 | else: 172 | st.markdown(response.strip()) # Use markdown for other responses 173 | st.session_state.messages.append({"role": "assistant", "content": response}) -------------------------------------------------------------------------------- /artifacts/factory-agent/frontend_config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | streamlit: 3 | title: "AI Factory Agent" 4 | tab_title: "AI Factory Agent" 5 | logo: "./images/smart-factory.jpeg" 6 | agent_intro_message: "How can I help?" -------------------------------------------------------------------------------- /artifacts/factory-agent/images/smart-factory.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/factory-agent/images/smart-factory.jpeg -------------------------------------------------------------------------------- /artifacts/factory-agent/requirements.txt: -------------------------------------------------------------------------------- 1 | utils==1.0.2 2 | asyncio==3.4.3 3 | azure-kusto-data==4.6.1 4 | azure-kusto-data[aio]==4.6.1 5 | semantic-kernel==1.17.0 6 | openai==1.58.1 7 | streamlit==1.41.1 -------------------------------------------------------------------------------- /artifacts/factory-agent/sample_questions.json: -------------------------------------------------------------------------------- 1 | { 2 | "question1": "What products are we manufacturing, and at which Sites are they produced?", 3 | "question2": "Determine the yield percentage for each Site this month by comparing the total units produced to the number of good units produced.", 4 | "question3": "Provide a list of operators, their respective sites, and their current shift schedules.", 5 | "question4": "Si necesito ponerme en contacto con el operador Steve Harris en las instalaciones de Austin, ¿cuál es su número 
de teléfono y su dirección de correo electrónico?", 6 | "question5": "能否提供单元格列表,包括 “区域”、“站点 ”维护日期和维护状态?" 7 | } -------------------------------------------------------------------------------- /artifacts/media/UNS.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/UNS.png -------------------------------------------------------------------------------- /artifacts/media/ansible-prov-cloud-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/ansible-prov-cloud-1.png -------------------------------------------------------------------------------- /artifacts/media/ansible-prov-cloud-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/ansible-prov-cloud-2.png -------------------------------------------------------------------------------- /artifacts/media/architecture-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/architecture-overview.png -------------------------------------------------------------------------------- /artifacts/media/azure-deployed-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/azure-deployed-1-1.png -------------------------------------------------------------------------------- /artifacts/media/azure-deployed-1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/azure-deployed-1.png -------------------------------------------------------------------------------- /artifacts/media/azure-deployed-2-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/azure-deployed-2-2.png -------------------------------------------------------------------------------- /artifacts/media/azure-deployed-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/azure-deployed-2.png -------------------------------------------------------------------------------- /artifacts/media/azure-deployed-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/azure-deployed-3.png -------------------------------------------------------------------------------- /artifacts/media/cloud-uninstall.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/cloud-uninstall.png -------------------------------------------------------------------------------- /artifacts/media/demo-video.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/demo-video.gif 
-------------------------------------------------------------------------------- /artifacts/media/edge-deployed-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/edge-deployed-1.png -------------------------------------------------------------------------------- /artifacts/media/edge-deployed-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/edge-deployed-2.png -------------------------------------------------------------------------------- /artifacts/media/edge-deployed-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/edge-deployed-3.png -------------------------------------------------------------------------------- /artifacts/media/evh-messages.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/evh-messages.png -------------------------------------------------------------------------------- /artifacts/media/fabric-home.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric-home.png -------------------------------------------------------------------------------- /artifacts/media/fabric-tables.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric-tables.png -------------------------------------------------------------------------------- /artifacts/media/fabric_assets-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_assets-1.png -------------------------------------------------------------------------------- /artifacts/media/fabric_assets-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_assets-2.png -------------------------------------------------------------------------------- /artifacts/media/fabric_eventstream-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_eventstream-1-1.png -------------------------------------------------------------------------------- /artifacts/media/fabric_eventstream-1-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_eventstream-1-2.png -------------------------------------------------------------------------------- /artifacts/media/fabric_eventstream-1-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_eventstream-1-3.png -------------------------------------------------------------------------------- 
/artifacts/media/fabric_eventstream-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_eventstream-1.png -------------------------------------------------------------------------------- /artifacts/media/fabric_eventstream-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_eventstream-2.png -------------------------------------------------------------------------------- /artifacts/media/fabric_operators-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_operators-1.png -------------------------------------------------------------------------------- /artifacts/media/fabric_operators-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_operators-2.png -------------------------------------------------------------------------------- /artifacts/media/fabric_products-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_products-1.png -------------------------------------------------------------------------------- /artifacts/media/fabric_products-2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/fabric_products-2.png -------------------------------------------------------------------------------- /artifacts/media/factory-agent-communication-flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/factory-agent-communication-flow.png -------------------------------------------------------------------------------- /artifacts/media/factory-agent-prompt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/factory-agent-prompt.png -------------------------------------------------------------------------------- /artifacts/media/factory-assistant-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/factory-assistant-ui.png -------------------------------------------------------------------------------- /artifacts/media/key-components.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/key-components.png -------------------------------------------------------------------------------- /artifacts/media/mqttui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/mqttui.png -------------------------------------------------------------------------------- 
/artifacts/media/simulation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/media/simulation.png -------------------------------------------------------------------------------- /artifacts/mqtt-data-simulator/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a base image with Python installed 2 | FROM python:3.12.3-slim 3 | 4 | # Set the working directory 5 | WORKDIR /app 6 | 7 | # Copy the Python simulator into the container 8 | COPY simulator.py requirements.txt . 9 | 10 | # Install required Python packages 11 | RUN pip3 install -r requirements.txt 12 | 13 | # Command to run the simulator 14 | CMD ["python", "simulator.py"] -------------------------------------------------------------------------------- /artifacts/mqtt-data-simulator/README.md: -------------------------------------------------------------------------------- 1 | # MQTT Data Simulator 2 | 3 | This project simulates data publishing to an MQTT broker, generating dynamic data based on configuration files. The simulation supports multiple data types (numeric, boolean, datetime, string, etc.) and topics, making it ideal for testing IoT or distributed systems where data is exchanged through MQTT. 4 | 5 | ## Table of Contents 6 | 7 | - [Features](#features) 8 | - [Installation](#installation) 9 | - [Configuration](#configuration) 10 | - [Usage](#usage) 11 | - [MQTT Client Configuration](#mqtt-client-configuration) 12 | - [Data Generation Logic](#data-generation-logic) 13 | - [Tag Types](#tag-types) 14 | - [Additional Parameters](#additional-parameters) 15 | - [Error Handling](#error-handling) 16 | - [Logging](#logging) 17 | - [License](#license) 18 | 19 | ## Features 20 | 21 | - Simulates MQTT data publishing with customizable configuration. 
22 | - Supports multiple data types: boolean, numeric, datetime, string, UUID. 23 | - Incremental and bounded value generation for numeric tags. 24 | - Configurable MQTT client with support for authentication and TLS encryption. 25 | - Automatic handling of data publishing intervals. 26 | - Provides colored terminal output for better readability. 27 | 28 | ## Installation 29 | 30 | 1. Clone the repository: 31 | ```bash 32 | git clone https://github.com/chriscrcodes/talk-to-your-factory.git 33 | cd artifacts/mqtt-data-simulator 34 | ``` 35 | 36 | 2. Install the required dependencies: 37 | ```bash 38 | pip install -r requirements.txt 39 | ``` 40 | 41 | Dependencies include: 42 | - `paho-mqtt` 43 | - `colorama` 44 | - `pyfiglet` 45 | 46 | ## Configuration 47 | 48 | The simulator reads configuration from a `config.json` file. Below is an example configuration: 49 | 50 | ```json 51 | { 52 | "mqtt_broker": { 53 | "address": "broker.hivemq.com", 54 | "port": 1883, 55 | "username": "your_username", 56 | "password": "your_password", 57 | "use_tls": false 58 | }, 59 | "root_topic": "home/sensor", 60 | "publish_interval": 5, 61 | "topics": [ 62 | { 63 | "topics": ["temperature", "humidity"], 64 | "tags": [ 65 | { 66 | "tag": "temp_sensor", 67 | "type": "numeric", 68 | "min_value": 10, 69 | "max_value": 35, 70 | "increment_step": 0.5, 71 | "reset": true 72 | }, 73 | { 74 | "tag": "humidity_sensor", 75 | "type": "numeric", 76 | "min_value": 20, 77 | "max_value": 80, 78 | "increment_step": 1 79 | } 80 | ] 81 | } 82 | ] 83 | } 84 | ``` 85 | 86 | ### Configuration Fields 87 | 88 | - **mqtt_broker**: MQTT broker settings (address, port, username, password, TLS). 89 | - **root_topic**: The root topic under which all sub-topics will be published. 90 | - **publish_interval**: Time interval (in seconds) between consecutive data publications. 91 | - **topics**: A list of topic configurations. Each entry includes: 92 | - `topics`: List of topic names. 
93 | - `tags`: List of tags with their respective data type and generation parameters. 94 | 95 | ## Usage 96 | 97 | 1. Prepare your `config.json` file with the desired settings. 98 | 2. Run the simulator: 99 | ```bash 100 | python mqtt_data_simulator.py 101 | ``` 102 | 103 | 3. The simulator will: 104 | - Connect to the MQTT broker using the settings in `config.json`. 105 | - Continuously publish data to the specified topics at intervals. 106 | 107 | ## MQTT Client Configuration 108 | 109 | The MQTT client is configured with the following parameters: 110 | 111 | - **Username/Password Authentication**: If provided, the simulator will authenticate with the broker using a username and password. 112 | - **TLS Encryption**: The simulator supports secure communication using TLS, configurable through the `config.json` file. Set `use_tls` to `true` and provide the `certfile` and `keyfile` for authentication. 113 | 114 | ## Data Generation Logic 115 | 116 | The simulator can generate different types of data for each tag in the configuration. Here's a detailed explanation of each type and the relevant configuration parameters: 117 | 118 | ### Tag Types 119 | 120 | 1. **Boolean** 121 | - Randomly generates either `True` or `False`. 122 | - Example configuration: 123 | ```json 124 | { 125 | "tag": "door_sensor", 126 | "type": "boolean" 127 | } 128 | ``` 129 | 130 | 2. **Integer and Float (Numeric Ranges)** 131 | - Generates a number (integer or float) within a specified range, optionally increasing or decreasing by a step on each publish cycle. 132 | - **min_value**: The minimum value the tag can generate. 133 | - **max_value**: The maximum value the tag can generate. 134 | - **increment_step**: The value to increment or decrement by on each publish cycle. 135 | - **reset**: Whether to reset the value to `min_value` if it exceeds `max_value`. 
136 | - Example configuration: 137 | ```json 138 | { 139 | "tag": "temp_sensor", 140 | "type": "numeric", 141 | "min_value": 10, 142 | "max_value": 35, 143 | "increment_step": 0.5, 144 | "reset": true 145 | } 146 | ``` 147 | 148 | 3. **Constant** 149 | - Always generates the same predefined value for every publish cycle. 150 | - Example configuration: 151 | ```json 152 | { 153 | "tag": "static_value", 154 | "constant": 42 155 | } 156 | ``` 157 | 158 | 4. **Datetime** 159 | - Generates the current UTC timestamp in ISO 8601 format. 160 | - Example configuration: 161 | ```json 162 | { 163 | "tag": "timestamp", 164 | "type": "datetime" 165 | } 166 | ``` 167 | 168 | 5. **String** 169 | - Generates a random string, typically in the format `"SampleString_"`. 170 | - Example configuration: 171 | ```json 172 | { 173 | "tag": "device_id", 174 | "type": "string" 175 | } 176 | ``` 177 | 178 | 6. **UUID** 179 | - Generates a unique identifier (UUID v4) for each publish cycle. 180 | - Example configuration: 181 | ```json 182 | { 183 | "tag": "session_id", 184 | "type": "guid" 185 | } 186 | ``` 187 | 188 | ### Additional Parameters 189 | 190 | 1. **mean** and **deviation** 191 | - These parameters are used to generate values that fall within a range centered around a `mean` value with a random variation defined by `deviation`. The generated value will be between `mean - deviation` and `mean + deviation`. 192 | - Example configuration: 193 | ```json 194 | { 195 | "tag": "random_temperature", 196 | "type": "numeric", 197 | "mean": 25, 198 | "deviation": 5 199 | } 200 | ``` 201 | - This would generate temperatures between 20 and 30. 202 | 203 | 2. **min_value** and **max_value** 204 | - These define the lower and upper bounds for the values a tag can generate. 205 | - They are mainly used with numeric tags to set boundaries for the data. 
206 | - Example configuration: 207 | ```json 208 | { 209 | "tag": "pressure_sensor", 210 | "type": "numeric", 211 | "min_value": 50, 212 | "max_value": 100 213 | } 214 | ``` 215 | 216 | 3. **increment_step** 217 | - This defines how much a numeric value should increase or decrease on each cycle. 218 | - The value increments or decrements based on the step size and loops back to `min_value` if `reset` is `true`. 219 | - Example configuration: 220 | ```json 221 | { 222 | "tag": "flow_rate", 223 | "type": "numeric", 224 | "min_value": 1, 225 | "max_value": 10, 226 | "increment_step": 0.5, 227 | "reset": true 228 | } 229 | ``` 230 | 231 | ### Customization and Behavior 232 | 233 | - The **numeric** type can use either the `mean`/`deviation` approach or the `min_value`/`max_value` with `increment_step`. You can choose one depending on your use case. 234 | - The **boolean**, **constant**, **datetime**, **string**, and **UUID** types do not support `min_value`, `max_value`, or `increment_step`. 235 | 236 | These settings allow for flexible data generation to simulate real-world scenarios where sensor values change over time or remain constant. 237 | 238 | ## Error Handling 239 | 240 | The code is wrapped with exception handling using a custom decorator `@handle_exception`. This decorator ensures any errors during function execution are logged, providing detailed error information. Each decorated method in the simulator catches exceptions, logs them, and re-raises the error. 241 | 242 | ## Logging 243 | 244 | The application logs information using Python's `logging` module. By default, logs are displayed in the terminal and include timestamps and logging levels. The logging level can be configured to display different levels of verbosity. 245 | 246 | ### Log Levels 247 | 248 | - `INFO`: General information and success messages (default). 249 | - `DEBUG`: Detailed information about the data generation process. 
250 | - `ERROR`: Captures errors during execution, along with the function where they occurred. 251 | 252 | ### Colored Output 253 | 254 | Terminal outputs are colorized using `colorama` to enhance readability: 255 | 256 | - **Green**: Successful MQTT connections. 257 | - **Red**: Errors. 258 | - **Blue**: Published data. 259 | 260 | ## License 261 | 262 | This project is licensed under the MIT License. See the [LICENSE](../../LICENSE) file for more details. -------------------------------------------------------------------------------- /artifacts/mqtt-data-simulator/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "mqtt_broker": { 3 | "address": "broker.hivemq.com", 4 | "port": 1883 5 | }, 6 | "root_topic": "LightningCars", 7 | "publish_interval": 1, 8 | "topics": [ 9 | { 10 | "topics": [ 11 | "Austin/Stamping/Line1/PIBIDT", 12 | "Austin/Welding/Line1/I1IX2I", 13 | "Austin/Painting/Line1/48H0LP", 14 | "Austin/Assembly/Line1/82TFFU", 15 | "Austin/Inspection/Line1/9OMOSD" 16 | ], 17 | "tags": [ 18 | { "tag": "Latitude", "value": 30.2672, "type": "constant" }, 19 | { "tag": "Longitude", "value": -97.7431, "type": "constant" }, 20 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 21 | { "tag": "OperatingTime", "type": "int", "min_value": 60000, "max_value": 64000, "increment_step": 500, "update_interval": 3600 }, 22 | { "tag": "TotalPartsCount", "type": "int", "min_value": 800, "max_value": 1200, "update_interval": 7200 }, 23 | { "tag": "GoodPartsCount", "type": "int", "min_value": 500, "max_value": 900, "update_interval": 7200 }, 24 | { "tag": "IdealCycleTime", "value": 52.0, "type": "constant" }, 25 | { "tag": "EnergyConsumption", "mean": 10.0, "deviation": 0.5, "type": "float" }, 26 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 27 | { "tag": "Shift", "type": "int", "min_value": 1, "max_value": 3, "increment_step": 1, "update_interval": 28800 }, 28 | { 
"tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 29 | ] 30 | }, 31 | { 32 | "topics": [ 33 | "Buffalo/Stamping/Line1/9VR7RO", 34 | "Buffalo/Welding/Line1/JKTB4G", 35 | "Buffalo/Painting/Line1/SDZI4E", 36 | "Buffalo/Assembly/Line1/MHB89D", 37 | "Buffalo/Inspection/Line1/RZ8BO8" 38 | ], 39 | "tags": [ 40 | { "tag": "Latitude", "value": 42.8864, "type": "constant" }, 41 | { "tag": "Longitude", "value": -78.8784, "type": "constant" }, 42 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 43 | { "tag": "OperatingTime", "type": "int", "min_value": 59000, "max_value": 63000, "increment_step": 500, "update_interval": 3600 }, 44 | { "tag": "TotalPartsCount", "type": "int", "min_value": 700, "max_value": 900, "update_interval": 7200 }, 45 | { "tag": "GoodPartsCount", "type": "int", "min_value": 500, "max_value": 800, "update_interval": 7200 }, 46 | { "tag": "IdealCycleTime", "value": 55.0, "type": "constant" }, 47 | { "tag": "EnergyConsumption", "mean": 10.0, "deviation": 0.5, "type": "float" }, 48 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 49 | { "tag": "Shift", "type": "int", "min_value": 4, "max_value": 6, "increment_step": 1, "update_interval": 28800 }, 50 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 51 | ] 52 | }, 53 | { 54 | "topics": [ 55 | "Berlin/Stamping/Line1/7H27L4", 56 | "Berlin/Welding/Line1/LP0HNU", 57 | "Berlin/Painting/Line1/X78TFV", 58 | "Berlin/Assembly/Line1/07RIRW", 59 | "Berlin/Inspection/Line1/NVIIRV" 60 | ], 61 | "tags": [ 62 | { "tag": "Latitude", "value": 52.5200, "type": "constant" }, 63 | { "tag": "Longitude", "value": 13.4050, "type": "constant" }, 64 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 65 | { "tag": "OperatingTime", "type": "int", "min_value": 69000, "max_value": 71000, "increment_step": 500, "update_interval": 3600 }, 66 | { "tag": "TotalPartsCount", "type": "int", "min_value": 1400, "max_value": 1600, 
"update_interval": 7200 }, 67 | { "tag": "GoodPartsCount", "type": "int", "min_value": 1350, "max_value": 1550, "update_interval": 7200 }, 68 | { "tag": "IdealCycleTime", "value": 35.0, "type": "constant" }, 69 | { "tag": "EnergyConsumption", "mean": 12.0, "deviation": 0.5, "type": "float" }, 70 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 71 | { "tag": "Shift", "type": "int", "min_value": 7, "max_value": 9, "increment_step": 1, "update_interval": 28800 }, 72 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 73 | ] 74 | }, 75 | { 76 | "topics": [ 77 | "Berlin/Stamping/Line2/834C8X", 78 | "Berlin/Welding/Line2/IMHAO0", 79 | "Berlin/Painting/Line2/EJAOET", 80 | "Berlin/Assembly/Line2/MI9VEP", 81 | "Berlin/Inspection/Line2/BI2BI0" 82 | ], 83 | "tags": [ 84 | { "tag": "Latitude", "value": 52.5200, "type": "constant" }, 85 | { "tag": "Longitude", "value": 13.4050, "type": "constant" }, 86 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 87 | { "tag": "OperatingTime", "type": "int", "min_value": 69000, "max_value": 71000, "increment_step": 500, "update_interval": 3600 }, 88 | { "tag": "TotalPartsCount", "type": "int", "min_value": 1400, "max_value": 1600, "update_interval": 7200 }, 89 | { "tag": "GoodPartsCount", "type": "int", "min_value": 1350, "max_value": 1550, "update_interval": 7200 }, 90 | { "tag": "IdealCycleTime", "value": 35.0, "type": "constant" }, 91 | { "tag": "EnergyConsumption", "mean": 12.0, "deviation": 0.5, "type": "float" }, 92 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 93 | { "tag": "Shift", "type": "int", "min_value": 10, "max_value": 12, "increment_step": 1, "update_interval": 28800 }, 94 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 95 | ] 96 | }, 97 | { 98 | "topics": [ 99 | "Shanghai/Mixing/Line1/53FCIU", 100 | "Shanghai/Coating/Line1/0CYBFQ", 101 | "Shanghai/Drying/Line1/0DKBDE", 102 | "Shanghai/Assembly/Line1/LFR8PP", 
103 | "Shanghai/Inspection/Line1/DR2FB3" 104 | ], 105 | "tags": [ 106 | { "tag": "Latitude", "value": 31.2304, "type": "constant" }, 107 | { "tag": "Longitude", "value": 121.4737, "type": "constant" }, 108 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 109 | { "tag": "OperatingTime", "type": "int", "min_value": 68000, "max_value": 70000, "increment_step": 500, "update_interval": 3600 }, 110 | { "tag": "TotalPartsCount", "type": "int", "min_value": 1200, "max_value": 1500, "update_interval": 7200 }, 111 | { "tag": "GoodPartsCount", "type": "int", "min_value": 1150, "max_value": 1450, "update_interval": 7200 }, 112 | { "tag": "IdealCycleTime", "value": 38.0, "type": "constant" }, 113 | { "tag": "EnergyConsumption", "mean": 11.0, "deviation": 0.5, "type": "float" }, 114 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 115 | { "tag": "Shift", "type": "int", "min_value": 13, "max_value": 15, "increment_step": 1, "update_interval": 28800 }, 116 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 117 | ] 118 | } 119 | ] 120 | } -------------------------------------------------------------------------------- /artifacts/mqtt-data-simulator/requirements.txt: -------------------------------------------------------------------------------- 1 | paho-mqtt 2 | colorama 3 | pyfiglet -------------------------------------------------------------------------------- /artifacts/mqtt-data-simulator/simulator.py: -------------------------------------------------------------------------------- 1 | import json 2 | import paho.mqtt.client as mqtt 3 | import random 4 | import time 5 | from datetime import datetime, timezone 6 | import uuid 7 | import ssl 8 | import colorama 9 | import logging 10 | import pyfiglet 11 | from typing import Any, Dict, List, Optional, Callable 12 | 13 | # Initialize Colorama for colored terminal output 14 | colorama.init(autoreset=True) 15 | 16 | # Configure logging with a configurable logging 
# Configure logging with a configurable logging level
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


class ConfigConstants:
    """Constants for configuration settings."""
    CONFIG_FILE_PATH = "/app/config.json"  # Path to the configuration file
    DEFAULT_PUBLISH_INTERVAL = 1  # Default publish interval in seconds
    UNS_COMPONENT_COUNT = 5  # Number of expected components in UNS
    DEFAULT_LOGGING_LEVEL = logging.INFO  # Default logging level


def handle_exception(func: Callable) -> Callable:
    """
    Decorator to handle exceptions and log them with context.

    Logs the name of the failing function together with the error message,
    then re-raises so callers can still react to the failure.
    """
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            logging.error(f"{colorama.Fore.RED}Error in {func.__name__}: {str(e)}")
            raise
    return wrapper


class MqttDataSimulator:
    """
    Simulate MQTT data publishing based on a configuration.
    Handles MQTT client configuration, data generation, and publishing.
    """

    def __init__(self, config_file_path: str = ConfigConstants.CONFIG_FILE_PATH):
        # Load the JSON configuration and prepare the MQTT client up front,
        # so a bad config fails fast at construction time.
        self.config_file_path = config_file_path
        self.client: Optional[mqtt.Client] = None
        self.config = self.load_config()
        self.configure_mqtt()

    def display_banner(self) -> None:
        """Display an ASCII banner with author and project info."""
        banner = pyfiglet.figlet_format("MQTT Data Simulator")
        author_info = f"{colorama.Fore.CYAN}Author: Christophe Crémon\n"
        website_info = f"{colorama.Fore.CYAN}Website: https://github.com/chriscrcodes"
        print(f"{colorama.Fore.YELLOW}{banner}{colorama.Style.RESET_ALL}{author_info}{website_info}")

    @handle_exception
    def load_config(self) -> Dict[str, Any]:
        """
        Load configuration from a JSON file.

        Returns:
            Dict[str, Any]: The validated configuration.

        Raises:
            FileNotFoundError: If the config file is missing.
            json.JSONDecodeError: If the config file contains invalid JSON.
        """
        with open(self.config_file_path, 'r') as file:
            config = json.load(file)
        self.validate_config(config)
        return config

    @handle_exception
    def validate_config(self, config: Dict[str, Any]) -> None:
        """
        Validate the configuration settings.

        Ensures required fields are present and applies defaults where needed.

        Raises:
            ValueError: If required fields are missing.
        """
        if 'mqtt_broker' not in config:
            raise ValueError("MQTT broker configuration is missing.")
        config.setdefault('publish_interval', ConfigConstants.DEFAULT_PUBLISH_INTERVAL)

    @handle_exception
    def configure_mqtt(self) -> None:
        """
        Configure MQTT client settings, including authentication and TLS.
        Applies necessary settings for secure communication if required.
        """
        self.client = mqtt.Client(protocol=mqtt.MQTTv5)
        mqtt_broker = self.config['mqtt_broker']
        # paho accepts username=None, which simply disables authentication.
        self.client.username_pw_set(mqtt_broker.get('username'), mqtt_broker.get('password'))

        if mqtt_broker.get('use_tls', False):
            self.setup_tls(mqtt_broker)

    def setup_tls(self, mqtt_broker: Dict[str, Any]) -> None:
        """
        Set up TLS for the MQTT client if required.

        Args:
            mqtt_broker (Dict[str, Any]): Configuration settings for the MQTT broker
                ('certfile' and 'keyfile' are read for client authentication).
        """
        self.client.tls_set(
            certfile=mqtt_broker.get('certfile'),
            keyfile=mqtt_broker.get('keyfile'),
            cert_reqs=ssl.CERT_REQUIRED,
            tls_version=ssl.PROTOCOL_TLSv1_2
        )

    @handle_exception
    def connect_mqtt(self) -> None:
        """
        Connect to the MQTT broker using settings from the configuration.
        Logs the connection status.
        """
        mqtt_broker = self.config['mqtt_broker']
        self.client.connect(mqtt_broker['address'], mqtt_broker['port'])
        logging.info(f"{colorama.Fore.GREEN}Connected to MQTT broker at {mqtt_broker['address']}:{mqtt_broker['port']} 🚀")

    def handle_increment_step(self, tag_config: Dict[str, Any]) -> Any:
        """
        Handle the logic for incrementing or decrementing tag values over time.

        Mutates 'current_value' and 'last_update' inside tag_config so state
        persists between publish cycles. Ensures values stay within configured
        bounds and optionally resets when exceeding max_value.
        """
        current_value = tag_config.get('current_value', tag_config['min_value'])
        now = datetime.now(timezone.utc)

        tag_config.setdefault('last_update', now)
        elapsed_time = (now - tag_config['last_update']).total_seconds()

        # With no 'update_interval' configured the value is updated on every cycle.
        if elapsed_time >= tag_config.get('update_interval', 0):
            current_value = self.update_value(tag_config, current_value)
            tag_config['current_value'] = current_value
            tag_config['last_update'] = now
            logging.debug(f"Updated current value to {current_value} (step: {tag_config.get('increment_step', 0)})")

        return current_value

    def update_value(self, tag_config: Dict[str, Any], current_value: float) -> float:
        """
        Apply the configured step to the current value while enforcing limits.

        When the stepped value would exceed 'max_value', it either wraps around
        to 'min_value' (if 'reset' is true) or clamps at 'max_value'.

        Args:
            tag_config (Dict[str, Any]): Configuration for the tag
                ('min_value', 'max_value', optional 'increment_step',
                'decrement_step' and 'reset').
            current_value (float): The current value of the tag.

        Returns:
            float: The updated current value.
        """
        step = tag_config.get('increment_step', 0) - tag_config.get('decrement_step', 0)
        stepped = current_value + step

        # Bug fix: the previous implementation clamped to max_value *before*
        # testing the overflow, so the 'reset' wrap-around branch was
        # unreachable. Check the raw stepped value first, as documented.
        if stepped > tag_config['max_value']:
            return tag_config['min_value'] if tag_config.get('reset', False) else tag_config['max_value']
        return max(tag_config['min_value'], stepped)

    @handle_exception
    def generate_data(self, tag_config: Dict[str, Any]) -> Any:
        """
        Generate data based on the tag configuration.

        Supports various data types such as constants, booleans, numeric
        ranges (mean/deviation or min/max with step), datetime, string
        and UUID values.
        """
        if 'constant' in tag_config:
            logging.debug(f"Generating constant value: {tag_config['constant']}")
            return tag_config['constant']

        tag_type = tag_config.get('type')

        if tag_type == 'boolean':
            value = random.choice([True, False])
            logging.debug(f"Generated boolean value: {value}")
            return value

        if tag_config.get('mean') is not None and tag_config.get('deviation') is not None:
            return self.generate_mean_deviation_value(tag_config)

        if 'min_value' in tag_config and 'max_value' in tag_config:
            return self.handle_increment_step(tag_config)

        return self.handle_other_data_types(tag_config)

    def generate_mean_deviation_value(self, tag_config: Dict[str, Any]) -> float:
        """
        Generate a value based on mean and deviation.

        Args:
            tag_config (Dict[str, Any]): Configuration for the tag
                ('mean' and 'deviation' are required).

        Returns:
            float: A value in [mean - deviation, mean + deviation], rounded
            to 2 decimal places.
        """
        value = round(random.uniform(tag_config['mean'] - tag_config['deviation'],
                                     tag_config['mean'] + tag_config['deviation']), 2)
        logging.debug(f"Generated value in range ({tag_config['mean'] - tag_config['deviation']}, "
                      f"{tag_config['mean'] + tag_config['deviation']}): {value}")
        return value

    @handle_exception
    def handle_other_data_types(self, tag_config: Dict[str, Any]) -> Any:
        """
        Handle non-numeric data types such as datetime, string, and UUID.

        Falls back to the tag's literal 'value' entry (or None) when the
        type is not recognized.
        """
        tag_type = tag_config['type']
        if tag_type == 'datetime':
            value = datetime.now(timezone.utc).isoformat()
            logging.debug(f"Generated datetime value: {value}")
            return value
        elif tag_type == 'string':
            value = f"SampleString_{random.randint(1, 100)}"
            logging.debug(f"Generated string value: {value}")
            return value
        elif tag_type == 'guid':
            value = str(uuid.uuid4())
            logging.debug(f"Generated GUID value: {value}")
            return value
        return tag_config.get('value')

    @handle_exception
    def publish_data(self, root_topic: str, topics: List[str], data: Dict[str, Any]) -> None:
        """
        Publish generated data to specified MQTT topics.

        Stamps the payload with a UTC timestamp, then publishes the same
        payload (with per-topic UNS enrichment) to each topic.
        """
        data['Timestamp'] = datetime.now(timezone.utc).isoformat()

        for topic in topics:
            self.publish_to_topic(root_topic, topic, data)

    def publish_to_topic(self, root_topic: str, topic: str, data: Dict[str, Any]) -> None:
        """
        Publish data to a specific topic.

        When the full topic has exactly 5 segments, they are copied into the
        payload as Enterprise/Site/Area/Line/Cell (Unified Namespace fields).

        Args:
            root_topic (str): The root topic for the MQTT message.
            topic (str): The specific topic to publish to.
            data (Dict[str, Any]): The data to publish (mutated in place).
        """
        full_topic = f"{root_topic}/{topic}"
        data['UNS'] = full_topic  # Update UNS with the current topic

        uns_components = data['UNS'].split('/')
        if len(uns_components) == ConfigConstants.UNS_COMPONENT_COUNT:
            data.update(dict(zip(['Enterprise', 'Site', 'Area', 'Line', 'Cell'], uns_components)))

        self.client.publish(full_topic, json.dumps(data))
        logging.info(f"{colorama.Fore.BLUE}{data['Timestamp']} - Published data to topic '{full_topic}': {data} 📡")

    def start_publishing(self) -> None:
        """
        Start the main publishing loop.

        Continuously generates and publishes data to configured topics at
        intervals, and always stops/disconnects the client on exit.
        """
        try:
            self.connect_mqtt()
            self.client.loop_start()
            self.run_publishing_loop()
        except KeyboardInterrupt:
            logging.info(f"{colorama.Fore.YELLOW}Stopping the publisher... 🛑")
        except Exception as e:
            logging.error(f"{colorama.Fore.RED}An error occurred during publishing: {str(e)}")
        finally:
            self.client.loop_stop()
            self.client.disconnect()

    def run_publishing_loop(self) -> None:
        """Run the publishing loop to generate and send data."""
        while True:
            for topic_config in self.config['topics']:
                topics = topic_config['topics']
                data = {tag['tag']: self.generate_data(tag) for tag in topic_config.get('tags', [])}
                self.publish_data(self.config['root_topic'], topics, data)
            time.sleep(self.config['publish_interval'])

if __name__ == "__main__":
    try:
        simulator = MqttDataSimulator()
        simulator.display_banner()
        simulator.start_publishing()
    except Exception as e:
        logging.error(f"{colorama.Fore.RED}Fatal error in the simulator: {str(e)}")
-------------------------------------------------------------------------------- 1 | apiVersion: connectivity.iotoperations.azure.com/v1 2 | kind: Dataflow 3 | metadata: 4 | name: bronze-to-silver 5 | namespace: azure-iot-operations 6 | spec: 7 | profileRef: default 8 | mode: Enabled 9 | operations: 10 | - operationType: Source 11 | sourceSettings: 12 | endpointRef: default 13 | dataSources: 14 | - LightningCars/# # 15 | - operationType: BuiltInTransformation 16 | builtInTransformationSettings: 17 | map: 18 | - inputs: 19 | - '*' 20 | output: '*' 21 | - inputs: 22 | - '$context(products).ProductId' 23 | output: ProductId 24 | - inputs: 25 | - '$context(operators).Hours' 26 | output: ShiftHours 27 | - inputs: 28 | - '$context(operators).EmployeeId' 29 | output: EmployeeId 30 | datasets: 31 | - key: products 32 | inputs: 33 | - '$source.Cell' # - $1 34 | - '$context(products).Cell' # - $2 35 | expression: '$1 == $2' 36 | - key: operators 37 | inputs: 38 | - '$source.Shift' # - $1 39 | - '$context(operators).Shift' # - $2 40 | expression: '$1 == $2' 41 | - operationType: Destination 42 | destinationSettings: 43 | endpointRef: default 44 | dataDestination: Silver -------------------------------------------------------------------------------- /artifacts/templates/azure-iot-operations/dataflows/dss/dss_set: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/19d5489edd9325c66f7d0a13786e2a7bd8652f10/artifacts/templates/azure-iot-operations/dataflows/dss/dss_set -------------------------------------------------------------------------------- /artifacts/templates/azure-iot-operations/dataflows/dss/operators.json: -------------------------------------------------------------------------------- 1 | { "Shift" : 1, "Hours" : "00:00-08:00", "EmployeeId" : 12153730 } 2 | { "Shift" : 2, "Hours" : "08:00-16:00", "EmployeeId" : 14388871 } 3 | { "Shift" : 3, "Hours" : "16:00-00:00", 
"EmployeeId" : 15864874 } 4 | { "Shift" : 4, "Hours" : "00:00-08:00", "EmployeeId" : 21067389 } 5 | { "Shift" : 5, "Hours" : "08:00-16:00", "EmployeeId" : 39198113 } 6 | { "Shift" : 6, "Hours" : "16:00-00:00", "EmployeeId" : 52621674 } 7 | { "Shift" : 7, "Hours" : "00:00-08:00", "EmployeeId" : 55179229 } 8 | { "Shift" : 8, "Hours" : "08:00-16:00", "EmployeeId" : 59398151 } 9 | { "Shift" : 9, "Hours" : "16:00-00:00", "EmployeeId" : 61244427 } 10 | { "Shift" : 10, "Hours" : "00:00-08:00", "EmployeeId" : 62041664 } 11 | { "Shift" : 11, "Hours" : "08:00-16:00", "EmployeeId" : 65751693 } 12 | { "Shift" : 12, "Hours" : "16:00-00:00", "EmployeeId" : 72440646 } 13 | { "Shift" : 13, "Hours" : "00:00-08:00", "EmployeeId" : 74091564 } 14 | { "Shift" : 14, "Hours" : "08:00-16:00", "EmployeeId" : 90159413 } 15 | { "Shift" : 15, "Hours" : "16:00-00:00", "EmployeeId" : 93757252 } -------------------------------------------------------------------------------- /artifacts/templates/azure-iot-operations/dataflows/dss/products.json: -------------------------------------------------------------------------------- 1 | { "Cell" : "7H27L4", "ProductId" : 12850046 } 2 | { "Cell" : "LP0HNU", "ProductId" : 12850046 } 3 | { "Cell" : "X78TFV", "ProductId" : 12850046 } 4 | { "Cell" : "07RIRW", "ProductId" : 12850046 } 5 | { "Cell" : "NVIIRV", "ProductId" : 12850046 } 6 | { "Cell" : "834C8X", "ProductId" : 12850046 } 7 | { "Cell" : "IMHAO0", "ProductId" : 12850046 } 8 | { "Cell" : "EJAOET", "ProductId" : 12850046 } 9 | { "Cell" : "MI9VEP", "ProductId" : 12850046 } 10 | { "Cell" : "BI2BI0", "ProductId" : 12850046 } 11 | { "Cell" : "PIBIDT", "ProductId" : 63182614 } 12 | { "Cell" : "I1IX2I", "ProductId" : 63182614 } 13 | { "Cell" : "48H0LP", "ProductId" : 63182614 } 14 | { "Cell" : "82TFFU", "ProductId" : 63182614 } 15 | { "Cell" : "9OMOSD", "ProductId" : 63182614 } 16 | { "Cell" : "9VR7RO", "ProductId" : 80176078 } 17 | { "Cell" : "JKTB4G", "ProductId" : 80176078 } 18 | { "Cell" : "SDZI4E", 
"ProductId" : 80176078 } 19 | { "Cell" : "MHB89D", "ProductId" : 80176078 } 20 | { "Cell" : "RZ8BO8", "ProductId" : 80176078 } 21 | { "Cell" : "53FCIU", "ProductId" : 95216290 } 22 | { "Cell" : "0CYBFQ", "ProductId" : 95216290 } 23 | { "Cell" : "0DKBDE", "ProductId" : 95216290 } 24 | { "Cell" : "LFR8PP", "ProductId" : 95216290 } 25 | { "Cell" : "DR2FB3", "ProductId" : 95216290 } -------------------------------------------------------------------------------- /artifacts/templates/azure-iot-operations/dataflows/json-after-enrichment.json: -------------------------------------------------------------------------------- 1 | { 2 | "Area": "Inspection", 3 | "Cell": "DR2FB3", 4 | "Downtime": 13.18, 5 | "EmployeeId": "90159413", 6 | "EnergyConsumption": 10.86, 7 | "Enterprise": "LightningCars", 8 | "GoodPartsCount": 1150, 9 | "IdealCycleTime": 38.0, 10 | "Latitude": 31.2304, 11 | "Line": "Line1", 12 | "Longitude": 121.4737, 13 | "OperatingTime": 68000, 14 | "PlannedProductionTime": 72000, 15 | "ProductId": 95216290, 16 | "Shift": 14, 17 | "ShiftHours": "08:00-16:00", 18 | "Site": "Shanghai", 19 | "Temperature": 87.13, 20 | "Timestamp": "2024-10-28T13:03:04.952031+00:00", 21 | "TotalPartsCount": 1200, 22 | "UNS": "LightningCars/Shanghai/Inspection/Line1/DR2FB3" 23 | } -------------------------------------------------------------------------------- /artifacts/templates/azure-iot-operations/dataflows/json-before-enrichment.json: -------------------------------------------------------------------------------- 1 | { 2 | "Latitude": 31.2304, 3 | "Longitude": 121.4737, 4 | "PlannedProductionTime": 72000, 5 | "OperatingTime": 68000, 6 | "TotalPartsCount": 1200, 7 | "GoodPartsCount": 1150, 8 | "IdealCycleTime": 38, 9 | "EnergyConsumption": 10.86, 10 | "Temperature": 87.13, 11 | "Shift": 14, 12 | "Downtime": 13.18, 13 | "Timestamp": "2024-10-28T13:03:04.952031+00:00", 14 | "UNS": "LightningCars/Shanghai/Inspection/Line1/DR2FB3", 15 | "Enterprise": "LightningCars", 16 | "Site": 
"Shanghai", 17 | "Area": "Inspection", 18 | "Line": "Line1", 19 | "Cell": "DR2FB3" 20 | } -------------------------------------------------------------------------------- /artifacts/templates/azure-iot-operations/dataflows/silver-to-cloud.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: connectivity.iotoperations.azure.com/v1 2 | kind: DataflowEndpoint 3 | metadata: 4 | name: azure-eventhub 5 | namespace: azure-iot-operations 6 | spec: 7 | endpointType: Kafka 8 | kafkaSettings: 9 | host: '.servicebus.windows.net:9093' 10 | batching: 11 | latencyMs: 0 12 | maxMessages: 100 13 | tls: 14 | mode: Enabled 15 | authentication: 16 | method: SystemAssignedManagedIdentity 17 | systemAssignedManagedIdentitySettings: 18 | audience: https://.servicebus.windows.net 19 | --- 20 | apiVersion: connectivity.iotoperations.azure.com/v1 21 | kind: Dataflow 22 | metadata: 23 | name: silver-to-cloud 24 | namespace: azure-iot-operations 25 | spec: 26 | profileRef: default 27 | operations: 28 | - operationType: source 29 | sourceSettings: 30 | endpointRef: default 31 | dataSources: 32 | - Silver/# 33 | - operationType: destination 34 | destinationSettings: 35 | endpointRef: azure-eventhub 36 | dataDestination: -------------------------------------------------------------------------------- /artifacts/templates/deploy/1_cloud-provision.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Provision Azure Cloud Resources 3 | hosts: localhost 4 | gather_facts: no 5 | vars_files: 6 | - variables.yaml 7 | 8 | tasks: 9 | - name: Select Azure Subscription 10 | ansible.builtin.shell: az account set --subscription "{{ SUBSCRIPTION_ID }}" 11 | 12 | - name: Register Azure Resource Providers for Azure IoT Operations 13 | ansible.builtin.shell: az provider register --name "{{ item }}" 14 | loop: 15 | - Microsoft.ExtendedLocation 16 | - Microsoft.Kubernetes 17 | - Microsoft.KubernetesConfiguration 
18 | - Microsoft.IoTOperations 19 | - Microsoft.DeviceRegistry 20 | - Microsoft.SecretSyncController 21 | 22 | - name: Install Azure CLI extension for Azure IoT Operations 23 | ansible.builtin.shell: az extension add --upgrade --name azure-iot-ops 24 | 25 | - name: Create Azure Resource Group 26 | ansible.builtin.shell: az group create --location "{{ LOCATION }}" --resource-group "{{ RESOURCE_GROUP }}" --subscription "{{ SUBSCRIPTION_ID }}" 27 | 28 | - name: Create Service Principal to manage Azure from Edge Cluster 29 | ansible.builtin.shell: az ad sp create-for-rbac --name "{{ AIO_SERVICE_PRINCIPAL }}" --role Contributor --scopes "/subscriptions/{{ SUBSCRIPTION_ID }}/resourceGroups/{{ RESOURCE_GROUP }}" 30 | register: aio_sp 31 | 32 | - name: Create Managed Identity for Azure IoT Operations (components) 33 | ansible.builtin.shell: az identity create --resource-group "{{ RESOURCE_GROUP }}" --name "{{ AIO_MANAGED_IDENTITY_COMPONENTS }}" 34 | register: managed_identity_components 35 | 36 | - name: Create Managed Identity for Azure IoT Operations (secrets) 37 | ansible.builtin.shell: az identity create --resource-group "{{ RESOURCE_GROUP }}" --name "{{ AIO_MANAGED_IDENTITY_SECRETS }}" 38 | register: managed_identity_secrets 39 | 40 | - name: Create Azure Storage Account 41 | ansible.builtin.shell: az storage account create --name "{{ STORAGEACCOUNT_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" --enable-hierarchical-namespace 42 | register: storageaccount 43 | 44 | - name: Create Azure Key Vault 45 | ansible.builtin.shell: az keyvault create --enable-rbac-authorization false --name "{{ KEYVAULT_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" 46 | register: keyvault 47 | 48 | - name: Create Azure IoT Operations Schema Registry 49 | ansible.builtin.shell: az iot ops schema registry create --name "aio" --resource-group "{{ RESOURCE_GROUP }}" --registry-namespace "{{ AIO_SCHEMA_REGISTRY_NAMESPACE }}" --sa-resource-id "{{ storageaccount.stdout | from_json | 
json_query('id') }}" 50 | register: schemaregistry 51 | 52 | - name: Assign 'Role Based Access Control Administrator' role to Service Principal 53 | ansible.builtin.shell: az role assignment create --assignee "{{ aio_sp.stdout | from_json | json_query('appId') }}" --role "Role Based Access Control Administrator" --scope "subscriptions/{{ SUBSCRIPTION_ID }}/resourceGroups/{{ RESOURCE_GROUP }}" 54 | 55 | - name: Assign 'Key Vault Secrets Officer' role to Managed Identity for Azure IoT Operations (secrets) 56 | ansible.builtin.shell: az role assignment create --assignee "{{ managed_identity_secrets.stdout | from_json | json_query('principalId') }}" --role "Key Vault Secrets Officer" --scope "{{ keyvault.stdout | from_json | json_query('id') }}" 57 | 58 | - name: Create Event Hub Namespace 59 | ansible.builtin.shell: az eventhubs namespace create --name "{{ EVENTHUB_NAMESPACE }}" --resource-group "{{ RESOURCE_GROUP }}" --location "{{ LOCATION }}" 60 | 61 | - name: Create Event Hub 62 | ansible.builtin.shell: az eventhubs eventhub create --name "{{ EVENTHUB_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" --namespace-name "{{ EVENTHUB_NAMESPACE }}" 63 | register: eventhub 64 | 65 | - name: Create Event Hub Consumer Group 66 | ansible.builtin.shell: az eventhubs eventhub consumer-group create --consumer-group-name "Fabric" --namespace-name "{{ EVENTHUB_NAMESPACE }}" --eventhub-name "{{ EVENTHUB_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" 67 | 68 | - name: Create Event Hub authorization rule 'Listen' 69 | ansible.builtin.shell: az eventhubs eventhub authorization-rule create --resource-group "{{ RESOURCE_GROUP }}" --namespace-name "{{ EVENTHUB_NAMESPACE }}" --eventhub-name "{{ EVENTHUB_NAME }}" --name Listen --rights Listen 70 | 71 | - name: Retrieve Event Hub connection key 72 | ansible.builtin.shell: az eventhubs eventhub authorization-rule keys list --resource-group "{{ RESOURCE_GROUP }}" --namespace-name "{{ EVENTHUB_NAMESPACE }}" --eventhub-name "{{ 
EVENTHUB_NAME }}" --name Listen --query primaryKey --output tsv 73 | register: eventhub_key 74 | 75 | - name: Create Azure Open AI Service 76 | ansible.builtin.shell: az cognitiveservices account create --name "{{ AZURE_OPENAI_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" --location "eastus" --kind OpenAI --sku s0 --subscription "{{ SUBSCRIPTION_ID }}" 77 | 78 | - name: Create GenAI model in Azure Open AI Service 79 | ansible.builtin.shell: az cognitiveservices account deployment create --resource-group "{{ RESOURCE_GROUP }}" --name "{{ AZURE_OPENAI_NAME }}" --deployment-name "smart-factory" --model-name "gpt-4o" --model-version "2024-08-06" --model-format OpenAI --sku-capacity 1000 --sku-name "GlobalStandard" 80 | 81 | - name: Create Service Principal for the GenAI Factory Agent 82 | ansible.builtin.shell: az ad sp create-for-rbac --name "{{ FACTORY_AGENT_SERVICE_PRINCIPAL }}" 83 | register: factory_agent_sp 84 | 85 | - name: Retrieve Object ID for Azure Arc service 86 | ansible.builtin.shell: az ad sp show --id bc313c14-388c-4e7d-a58e-70017303ee3b --query id --output tsv 87 | register: arc_object_id 88 | 89 | - name: Insert variables information in variables.yaml file 90 | ansible.builtin.blockinfile: 91 | path: ./variables.yaml 92 | block: | 93 | AIO_SP_APPID: "{{ aio_sp.stdout | from_json | json_query('appId') }}" 94 | AIO_SP_SECRET: "{{ aio_sp.stdout | from_json | json_query('password') }}" 95 | AIO_SCHEMA_REGISTRY_ID: "{{ schemaregistry.stdout | from_json | json_query('id') }}" 96 | AIO_MANAGED_IDENTITY_SECRETS_ID: "{{ managed_identity_secrets.stdout | from_json | json_query('id') }}" 97 | AIO_MANAGED_IDENTITY_COMPONENTS_ID: "{{ managed_identity_components.stdout | from_json | json_query('id') }}" 98 | TENANT: "{{ aio_sp.stdout | from_json | json_query('tenant') }}" 99 | ARC_OBJECT_ID: "{{ arc_object_id.stdout }}" 100 | KEYVAULT_ID: "{{ keyvault.stdout | from_json | json_query('id') }}" 101 | FACTORY_AGENT_SP_APPID: "{{ factory_agent_sp.stdout | from_json 
| json_query('appId') }}" 102 | FACTORY_AGENT_SP_SECRET: "{{ factory_agent_sp.stdout | from_json | json_query('password') }}" 103 | EVENTHUB_ID: "{{ eventhub.stdout | from_json | json_query('id') }}" 104 | EVENTHUB_KEY: "{{ eventhub_key.stdout }}" -------------------------------------------------------------------------------- /artifacts/templates/deploy/2_edge-install_aio.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install Azure IoT Operations on Edge Cluster 3 | hosts: localhost 4 | become: true 5 | gather_facts: no 6 | vars_files: 7 | - variables.yaml 8 | 9 | tasks: 10 | - name: Update Ubuntu apt repository 11 | ansible.builtin.apt: 12 | update_cache: yes 13 | 14 | - name: Install Azure CLI 15 | ansible.builtin.shell: curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash 16 | 17 | - name: Install Azure CLI extension for Azure IoT Operations 18 | ansible.builtin.shell: az extension add --upgrade --name azure-iot-ops 19 | 20 | - name: Install Azure CLI extension for Azure Arc connected Kubernetes 21 | ansible.builtin.shell: az extension add --upgrade --name connectedk8s 22 | 23 | - name: Install Kubernetes (K3s) 24 | ansible.builtin.shell: curl -sfL https://get.k3s.io | K3S_KUBECONFIG_MODE="644" INSTALL_K3S_EXEC="server --disable traefik" sh - 25 | 26 | - name: Create .kube directory 27 | ansible.builtin.file: 28 | path: ~/.kube 29 | state: directory 30 | mode: '0755' 31 | 32 | - name: Configure K3s 33 | ansible.builtin.shell: | 34 | sudo KUBECONFIG=~/.kube/config:/etc/rancher/k3s/k3s.yaml kubectl config view --flatten > ~/.kube/merged 35 | mv ~/.kube/merged ~/.kube/config 36 | chmod 0600 ~/.kube/config 37 | export KUBECONFIG=~/.kube/config 38 | kubectl config use-context default 39 | 40 | - name: Set fs.inotify.max_user_instances 41 | ansible.posix.sysctl: 42 | name: fs.inotify.max_user_instances 43 | value: '8192' 44 | sysctl_set: yes 45 | state: present 46 | 47 | - name: Set 
fs.inotify.max_user_watches 48 |       ansible.posix.sysctl: 49 |         name: fs.inotify.max_user_watches 50 |         value: '524288' 51 |         sysctl_set: yes 52 |         state: present 53 | 54 |     - name: Set fs.file-max 55 |       ansible.posix.sysctl: 56 |         name: fs.file-max 57 |         value: '100000' 58 |         sysctl_set: yes 59 |         state: present 60 | 61 |     - name: Connect to Azure subscription 62 |       ansible.builtin.shell: | 63 |         az login --service-principal --username "{{ AIO_SP_APPID }}" --password="{{ AIO_SP_SECRET }}" --tenant "{{ TENANT }}" 64 |         az account set --subscription "{{ SUBSCRIPTION_ID }}" 65 | 66 |     - name: Connect Edge Cluster to Azure Arc 67 |       ansible.builtin.shell: > 68 |         az connectedk8s connect --name "{{ AIO_CLUSTER_NAME }}" --location "{{ LOCATION }}" --resource-group "{{ RESOURCE_GROUP }}" --subscription "{{ SUBSCRIPTION_ID }}" --enable-oidc-issuer --enable-workload-identity --disable-auto-upgrade 69 | 70 |     - name: Configure Azure Arc custom location 71 |       ansible.builtin.shell: > 72 |         az connectedk8s enable-features --name "{{ AIO_CLUSTER_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" --custom-locations-oid "{{ ARC_OBJECT_ID }}" --features cluster-connect custom-locations 73 | 74 |     - name: Retrieve OIDC Issuer Profile 75 |       ansible.builtin.shell: > 76 |         az connectedk8s show --resource-group "{{ RESOURCE_GROUP }}" --name "{{ AIO_CLUSTER_NAME }}" --query oidcIssuerProfile.issuerUrl --output tsv 77 |       register: oidc_issuer_profile 78 | 79 |     - name: Configure K3s with OIDC Issuer Profile 80 |       ansible.builtin.shell: | 81 |         sudo touch /etc/rancher/k3s/config.yaml 82 |         sudo bash -c 'cat << EOL > /etc/rancher/k3s/config.yaml 83 |         kube-apiserver-arg: 84 |         - service-account-issuer={{ oidc_issuer_profile.stdout }} 85 |         - service-account-max-token-expiration=24h 86 |         EOL' 87 |         sudo systemctl restart k3s 88 |         sudo k3s check-config 89 | 90 |     - name: Check Azure IoT Operations prerequisites 91 |       ansible.builtin.shell: az iot ops check 92 | 93 |     - name: Initialize Azure IoT Operations foundations installation 94 | 
ansible.builtin.shell: > 95 | az iot ops init --subscription "{{ SUBSCRIPTION_ID }}" --cluster "{{ AIO_CLUSTER_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" 96 | 97 | - name: Install Azure IoT Operations 98 | ansible.builtin.shell: > 99 | az iot ops create --add-insecure-listener --kubernetes-distro K3s --name "{{ AIO_CLUSTER_NAME }}"-aio --cluster "{{ AIO_CLUSTER_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" --sr-resource-id "{{ AIO_SCHEMA_REGISTRY_ID }}" --broker-frontend-replicas 1 --broker-frontend-workers 1 --broker-backend-part 1 --broker-backend-workers 1 --broker-backend-rf 2 --broker-mem-profile Low 100 | 101 | - name: Enable secret sync 102 | ansible.builtin.shell: > 103 | az iot ops secretsync enable --instance "{{ AIO_CLUSTER_NAME }}"-aio --resource-group "{{ RESOURCE_GROUP }}" --mi-user-assigned "{{ AIO_MANAGED_IDENTITY_SECRETS_ID }}" --kv-resource-id "{{ KEYVAULT_ID }}" 104 | 105 | - name: Enable cloud connections sync 106 | ansible.builtin.shell: > 107 | az iot ops identity assign --name "{{ AIO_CLUSTER_NAME }}"-aio --resource-group "{{ RESOURCE_GROUP }}" --mi-user-assigned "{{ AIO_MANAGED_IDENTITY_COMPONENTS_ID }}" 108 | 109 | - name: Authorize Azure IoT Operations to send messages to Azure Event Hub 110 | ansible.builtin.shell: | 111 | AZ_AIO_EXT=$(az k8s-extension list --cluster-name "{{ AIO_CLUSTER_NAME }}" --resource-group "{{ RESOURCE_GROUP }}" --cluster-type connectedClusters --query "[?extensionType=='microsoft.iotoperations'].identity.principalId" --output tsv) 112 | az role assignment create --assignee $AZ_AIO_EXT --role "Azure Event Hubs Data Sender" --scope "{{ EVENTHUB_ID }}" -------------------------------------------------------------------------------- /artifacts/templates/deploy/3_edge-deploy_demo_components.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Deploy demo components 3 | hosts: localhost 4 | become: true 5 | gather_facts: no 6 | vars_files: 7 | - 
variables.yaml 8 | 9 |   tasks: 10 |     - name: Download the Distributed State Store tool 11 |       ansible.builtin.get_url: 12 |         url: https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/dss/dss_set 13 |         dest: /tmp/dss_set 14 |         mode: '0755' 15 | 16 |     - name: Download the 'Operators' Dataset 17 |       ansible.builtin.get_url: 18 |         url: https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/dss/operators.json 19 |         dest: /tmp/operators.json 20 | 21 |     - name: Download the 'Products' Dataset 22 |       ansible.builtin.get_url: 23 |         url: https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/dss/products.json 24 |         dest: /tmp/products.json 25 | 26 |     - name: Download the Data Flow 'Cloud Connector to Azure Event Hub' 27 |       ansible.builtin.get_url: 28 |         url: https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/silver-to-cloud.yaml 29 |         dest: /tmp/silver-to-cloud.yaml 30 | 31 |     - name: Modify the Data Flow 'Cloud Connector to Azure Event Hub' with the name of the Event Hub Namespace 32 |       ansible.builtin.replace: 33 |         path: /tmp/silver-to-cloud.yaml 34 |         regexp: '<EVENTHUB_NAMESPACE>' 35 |         replace: "{{ EVENTHUB_NAMESPACE }}" 36 | 37 |     - name: Modify the Data Flow 'Cloud Connector to Azure Event Hub' with the name of the Event Hub 38 |       ansible.builtin.replace: 39 |         path: /tmp/silver-to-cloud.yaml 40 |         regexp: '<EVENTHUB_NAME>' 41 |         replace: "{{ EVENTHUB_NAME }}" 42 | 43 |     - name: Import the 'Operators' Dataset into the Distributed State Store 44 |       ansible.builtin.shell: /tmp/dss_set --key operators --file "/tmp/operators.json" --address localhost 45 | 46 |     - name: Import the 'Products' Dataset into the Distributed State Store 47 |       ansible.builtin.shell: /tmp/dss_set --key products --file "/tmp/products.json" --address localhost 48 | 49 |     - name: Deploy 
Factory MQTT Simulator - Configuration 50 | ansible.builtin.shell: kubectl apply -f https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/k3s/pods/simulator/configuration.yaml 51 | 52 | - name: Deploy Factory MQTT Simulator - Deployment 53 | ansible.builtin.shell: kubectl apply -f https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/k3s/pods/simulator/deployment.yaml 54 | 55 | - name: Deploy the Data Flow 'bronze to silver' 56 | ansible.builtin.shell: kubectl apply -f https://raw.githubusercontent.com/chriscrcodes/talk-to-your-factory/main/artifacts/templates/azure-iot-operations/dataflows/bronze-to-silver.yaml 57 | 58 | - name: Deploy the Data Flow 'Cloud Connector to Azure Event Hub' 59 | ansible.builtin.shell: kubectl apply -f /tmp/silver-to-cloud.yaml -------------------------------------------------------------------------------- /artifacts/templates/deploy/azure-vm.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "deployBastion": { 6 | "type": "string", 7 | "allowedValues": [ 8 | "yes", 9 | "no" 10 | ], 11 | "defaultValue": "no", 12 | "metadata": { 13 | "description": "Connect to VM via Bastion Host." 14 | } 15 | }, 16 | "vmName": { 17 | "type": "string", 18 | "defaultValue": "azurevm", 19 | "metadata": { 20 | "description": "Virtual Machine name." 21 | } 22 | }, 23 | "vmSize": { 24 | "type": "string", 25 | "defaultValue": "Standard_D4s_v3", 26 | "metadata": { 27 | "description": "Virtual Machine size. Standard_D4s_v3 is recommended." 28 | } 29 | }, 30 | "vmOSVersion": { 31 | "type": "string", 32 | "defaultValue": "Ubuntu-2404", 33 | "metadata": { 34 | "description": "The Ubuntu version for the VM. Ubuntu-2404 is recommended." 
35 | } 36 | }, 37 | "vmAdminUsername": { 38 | "type": "string", 39 | "defaultValue": "azurevmadmin", 40 | "metadata": { 41 | "description": "Username for the Virtual Machine." 42 | } 43 | }, 44 | "vmAuthenticationType": { 45 | "type": "string", 46 | "defaultValue": "sshPublicKey", 47 | "allowedValues": [ 48 | "sshPublicKey", 49 | "password" 50 | ], 51 | "metadata": { 52 | "description": "Type of authentication to use on the Virtual Machine. SSH key is recommended." 53 | } 54 | }, 55 | "vmAdminPasswordOrKey": { 56 | "type": "securestring", 57 | "metadata": { 58 | "description": "SSH Key or password for the Virtual Machine. SSH key is recommended." 59 | } 60 | } 61 | }, 62 | "variables": { 63 | "nsgName": "nsg", 64 | "vnetName": "vnet", 65 | "vnetAddressPrefix": "10.0.0.0/16", 66 | "bastionHostname": "bastion", 67 | "bastionSubnetName": "AzureBastionSubnet", 68 | "bastionSubnetAddressPrefix": "10.0.0.0/24", 69 | "bastionPublicIpAddressName": "[format('{0}-pip', variables('bastionHostname'))]", 70 | "vmSubnetName": "[format('{0}-subnet', parameters('vmName'))]", 71 | "vmSubnetAddressPrefix": "10.0.1.0/24", 72 | "vmPublicIpAddressName": "[format('{0}-pip', parameters('vmName'))]", 73 | "vmNetworkInterfaceName": "[format('{0}-nic', parameters('vmName'))]", 74 | "vmDiskType": "Standard_LRS", 75 | "vmImageReferences": { 76 | "Ubuntu-2404": { 77 | "publisher": "Canonical", 78 | "offer": "ubuntu-24_04-lts", 79 | "sku": "server", 80 | "version": "latest" 81 | } 82 | }, 83 | "vmLinuxConfiguration": { 84 | "disablePasswordAuthentication": true, 85 | "ssh": { 86 | "publicKeys": [ 87 | { 88 | "path": "[format('/home/{0}/.ssh/authorized_keys', parameters('vmAdminUsername'))]", 89 | "keyData": "[parameters('vmAdminPasswordOrKey')]" 90 | } 91 | ] 92 | } 93 | }, 94 | "vmSecurityProfileJson": { 95 | "uefiSettings": { 96 | "secureBootEnabled": true, 97 | "vTpmEnabled": true 98 | }, 99 | "securityType": "TrustedLaunch" 100 | }, 101 | "vmExtensionName": "GuestAttestation", 102 | 
"vmExtensionPublisher": "Microsoft.Azure.Security.LinuxAttestation", 103 | "vmExtensionVersion": "1.0", 104 | "vmMaaTenantName": "GuestAttestation", 105 | "vmMaaEndpoint": "[substring('emptystring', 0, 0)]" 106 | }, 107 | "resources": [ 108 | { 109 | "type": "Microsoft.Network/publicIPAddresses", 110 | "apiVersion": "2022-07-01", 111 | "name": "[variables('vmPublicIpAddressName')]", 112 | "location": "[resourceGroup().location]", 113 | "sku": { 114 | "name": "Standard" 115 | }, 116 | "properties": { 117 | "publicIPAllocationMethod": "Static" 118 | } 119 | }, 120 | { 121 | "condition": "[equals(parameters('deployBastion'), 'yes')]", 122 | "type": "Microsoft.Network/publicIPAddresses", 123 | "apiVersion": "2020-11-01", 124 | "name": "[variables('bastionPublicIpAddressName')]", 125 | "location": "[resourceGroup().location]", 126 | "sku": { 127 | "name": "Standard" 128 | }, 129 | "properties": { 130 | "publicIPAllocationMethod": "Static" 131 | } 132 | }, 133 | { 134 | "condition": "[equals(parameters('deployBastion'), 'yes')]", 135 | "type": "Microsoft.Network/networkSecurityGroups", 136 | "apiVersion": "2022-07-01", 137 | "name": "[variables('nsgName')]", 138 | "location": "[resourceGroup().location]", 139 | "properties": { 140 | "securityRules": [ 141 | { 142 | "name": "AllowHttpsInBound", 143 | "properties": { 144 | "protocol": "Tcp", 145 | "sourcePortRange": "*", 146 | "sourceAddressPrefix": "Internet", 147 | "destinationPortRange": "443", 148 | "destinationAddressPrefix": "*", 149 | "access": "Allow", 150 | "priority": 100, 151 | "direction": "Inbound" 152 | } 153 | }, 154 | { 155 | "name": "AllowGatewayManagerInBound", 156 | "properties": { 157 | "protocol": "Tcp", 158 | "sourcePortRange": "*", 159 | "sourceAddressPrefix": "GatewayManager", 160 | "destinationPortRange": "443", 161 | "destinationAddressPrefix": "*", 162 | "access": "Allow", 163 | "priority": 110, 164 | "direction": "Inbound" 165 | } 166 | }, 167 | { 168 | "name": "AllowLoadBalancerInBound", 169 | 
"properties": { 170 | "protocol": "Tcp", 171 | "sourcePortRange": "*", 172 | "sourceAddressPrefix": "AzureLoadBalancer", 173 | "destinationPortRange": "443", 174 | "destinationAddressPrefix": "*", 175 | "access": "Allow", 176 | "priority": 120, 177 | "direction": "Inbound" 178 | } 179 | }, 180 | { 181 | "name": "AllowBastionHostCommunicationInBound", 182 | "properties": { 183 | "protocol": "*", 184 | "sourcePortRange": "*", 185 | "sourceAddressPrefix": "VirtualNetwork", 186 | "destinationPortRanges": [ 187 | "8080", 188 | "5701" 189 | ], 190 | "destinationAddressPrefix": "VirtualNetwork", 191 | "access": "Allow", 192 | "priority": 130, 193 | "direction": "Inbound" 194 | } 195 | }, 196 | { 197 | "name": "AllowSshRdpOutBound", 198 | "properties": { 199 | "protocol": "Tcp", 200 | "sourcePortRange": "*", 201 | "sourceAddressPrefix": "*", 202 | "destinationPortRanges": [ 203 | "22", 204 | "3389" 205 | ], 206 | "destinationAddressPrefix": "VirtualNetwork", 207 | "access": "Allow", 208 | "priority": 100, 209 | "direction": "Outbound" 210 | } 211 | }, 212 | { 213 | "name": "AllowAzureCloudCommunicationOutBound", 214 | "properties": { 215 | "protocol": "Tcp", 216 | "sourcePortRange": "*", 217 | "sourceAddressPrefix": "*", 218 | "destinationPortRange": "443", 219 | "destinationAddressPrefix": "AzureCloud", 220 | "access": "Allow", 221 | "priority": 110, 222 | "direction": "Outbound" 223 | } 224 | }, 225 | { 226 | "name": "AllowBastionHostCommunicationOutBound", 227 | "properties": { 228 | "protocol": "*", 229 | "sourcePortRange": "*", 230 | "sourceAddressPrefix": "VirtualNetwork", 231 | "destinationPortRanges": [ 232 | "8080", 233 | "5701" 234 | ], 235 | "destinationAddressPrefix": "VirtualNetwork", 236 | "access": "Allow", 237 | "priority": 120, 238 | "direction": "Outbound" 239 | } 240 | }, 241 | { 242 | "name": "AllowGetSessionInformationOutBound", 243 | "properties": { 244 | "protocol": "*", 245 | "sourcePortRange": "*", 246 | "sourceAddressPrefix": "*", 247 | 
"destinationAddressPrefix": "Internet", 248 | "destinationPortRanges": [ 249 | "80", 250 | "443" 251 | ], 252 | "access": "Allow", 253 | "priority": 130, 254 | "direction": "Outbound" 255 | } 256 | } 257 | ] 258 | } 259 | }, 260 | { 261 | "condition": "[equals(parameters('deployBastion'), 'no')]", 262 | "type": "Microsoft.Network/networkSecurityGroups", 263 | "apiVersion": "2022-07-01", 264 | "name": "[variables('nsgName')]", 265 | "location": "[resourceGroup().location]", 266 | "properties": { 267 | "securityRules": [ 268 | { 269 | "name": "AllowSSH", 270 | "properties": { 271 | "protocol": "Tcp", 272 | "sourcePortRange": "*", 273 | "destinationPortRange": "22", 274 | "sourceAddressPrefix": "*", 275 | "destinationAddressPrefix": "*", 276 | "access": "Allow", 277 | "priority": 1000, 278 | "direction": "Inbound" 279 | } 280 | }, 281 | { 282 | "name": "AllowMQTT", 283 | "properties": { 284 | "protocol": "Tcp", 285 | "sourcePortRange": "*", 286 | "destinationPortRange": "1883", 287 | "sourceAddressPrefix": "*", 288 | "destinationAddressPrefix": "*", 289 | "access": "Allow", 290 | "priority": 1010, 291 | "direction": "Inbound" 292 | } 293 | }, 294 | { 295 | "name": "AllowOPC", 296 | "properties": { 297 | "protocol": "Tcp", 298 | "sourcePortRange": "*", 299 | "destinationPortRange": "50000", 300 | "sourceAddressPrefix": "*", 301 | "destinationAddressPrefix": "*", 302 | "access": "Allow", 303 | "priority": 1020, 304 | "direction": "Inbound" 305 | } 306 | } 307 | ] 308 | } 309 | }, 310 | { 311 | "type": "Microsoft.Network/virtualNetworks", 312 | "apiVersion": "2021-08-01", 313 | "name": "[variables('vnetName')]", 314 | "location": "[resourceGroup().location]", 315 | "properties": { 316 | "addressSpace": { 317 | "addressPrefixes": [ 318 | "[variables('vnetAddressPrefix')]" 319 | ] 320 | } 321 | } 322 | }, 323 | { 324 | "type": "Microsoft.Network/virtualNetworks/subnets", 325 | "apiVersion": "2021-03-01", 326 | "name": "[format('{0}/{1}', variables('vnetName'), 
variables('vmSubnetName'))]", 327 | "properties": { 328 | "addressPrefix": "[variables('vmSubnetAddressPrefix')]", 329 | "networkSecurityGroup": { 330 | "id": "[resourceId('Microsoft.Network/networkSecurityGroups', variables('nsgName'))]" 331 | } 332 | }, 333 | "dependsOn": [ 334 | "[resourceId('Microsoft.Network/virtualNetworks', variables('vnetName'))]", 335 | "[resourceId('Microsoft.Network/networkSecurityGroups', variables('nsgName'))]" 336 | ] 337 | }, 338 | { 339 | "condition": "[equals(parameters('deployBastion'), 'yes')]", 340 | "type": "Microsoft.Network/virtualNetworks/subnets", 341 | "apiVersion": "2021-03-01", 342 | "name": "[format('{0}/{1}', variables('vnetName'), variables('bastionSubnetName'))]", 343 | "properties": { 344 | "addressPrefix": "[variables('bastionSubnetAddressPrefix')]", 345 | "networkSecurityGroup": { 346 | "id": "[resourceId('Microsoft.Network/networkSecurityGroups', variables('nsgName'))]" 347 | } 348 | }, 349 | "dependsOn": [ 350 | "[resourceId('Microsoft.Network/virtualNetworks', variables('vnetName'))]", 351 | "[resourceId('Microsoft.Network/networkSecurityGroups', variables('nsgName'))]" 352 | ] 353 | }, 354 | { 355 | "condition": "[equals(parameters('deployBastion'), 'yes')]", 356 | "type": "Microsoft.Network/bastionHosts", 357 | "apiVersion": "2020-11-01", 358 | "name": "[variables('bastionHostName')]", 359 | "location": "[resourceGroup().location]", 360 | "properties": { 361 | "ipConfigurations": [ 362 | { 363 | "name": "bastionHostIpConfig", 364 | "properties": { 365 | "subnet": { 366 | "id": "[resourceId('Microsoft.Network/virtualNetworks/subnets', variables('vnetName'), variables('bastionSubnetName'))]" 367 | }, 368 | "publicIPAddress": { 369 | "id": "[resourceId('Microsoft.Network/publicIPAddresses', variables('bastionPublicIpAddressName'))]" 370 | } 371 | } 372 | } 373 | ] 374 | }, 375 | "dependsOn": [ 376 | "[resourceId('Microsoft.Network/publicIPAddresses', variables('bastionPublicIpAddressName'))]", 377 | 
"[resourceId('Microsoft.Network/virtualNetworks', variables('vnetName'))]", 378 | "[resourceId('Microsoft.Network/virtualNetworks/subnets', variables('vnetName'), variables('bastionSubnetName'))]" 379 | ] 380 | }, 381 | { 382 | "type": "Microsoft.Network/networkInterfaces", 383 | "apiVersion": "2023-09-01", 384 | "name": "[variables('vmNetworkInterfaceName')]", 385 | "location": "[resourceGroup().location]", 386 | "properties": { 387 | "ipConfigurations": [ 388 | { 389 | "name": "ipconfig1", 390 | "properties": { 391 | "subnet": { 392 | "id": "[resourceId('Microsoft.Network/virtualNetworks/subnets', variables('vnetName'), variables('vmSubnetName'))]" 393 | }, 394 | "privateIPAllocationMethod": "Dynamic", 395 | "publicIPAddress": { 396 | "id": "[resourceId('Microsoft.Network/publicIPAddresses', variables('vmPublicIpAddressName'))]" 397 | } 398 | } 399 | } 400 | ], 401 | "networkSecurityGroup": { 402 | "id": "[resourceId('Microsoft.Network/networkSecurityGroups', variables('nsgName'))]" 403 | } 404 | }, 405 | "dependsOn": [ 406 | "[resourceId('Microsoft.Network/virtualNetworks', variables('vnetName'))]", 407 | "[resourceId('Microsoft.Network/virtualNetworks/subnets', variables('vnetName'), variables('vmSubnetName'))]", 408 | "[resourceId('Microsoft.Network/networkSecurityGroups', variables('nsgName'))]", 409 | "[resourceId('Microsoft.Network/publicIPAddresses', variables('vmPublicIpAddressName'))]" 410 | ] 411 | }, 412 | { 413 | "type": "Microsoft.Compute/virtualMachines", 414 | "apiVersion": "2023-09-01", 415 | "name": "[parameters('vmName')]", 416 | "location": "[resourceGroup().location]", 417 | "properties": { 418 | "hardwareProfile": { 419 | "vmSize": "[parameters('vmSize')]" 420 | }, 421 | "storageProfile": { 422 | "osDisk": { 423 | "createOption": "FromImage", 424 | "managedDisk": { 425 | "storageAccountType": "[variables('vmDiskType')]" 426 | } 427 | }, 428 | "imageReference": "[variables('vmImageReferences')[parameters('vmOSVersion')]]" 429 | }, 430 | 
"networkProfile": { 431 | "networkInterfaces": [ 432 | { 433 | "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('vmNetworkInterfaceName'))]" 434 | } 435 | ] 436 | }, 437 | "osProfile": { 438 | "computerName": "[parameters('vmName')]", 439 | "adminUsername": "[parameters('vmAdminUsername')]", 440 | "adminPassword": "[parameters('vmAdminPasswordOrKey')]", 441 | "linuxConfiguration": "[if(equals(parameters('vmAuthenticationType'), 'password'), null(), variables('vmLinuxConfiguration'))]" 442 | }, 443 | "securityProfile": "[if(equals(variables('vmSecurityProfileJson').securityType, 'TrustedLaunch'), variables('vmSecurityProfileJson'), null())]" 444 | }, 445 | "dependsOn": [ 446 | "[resourceId('Microsoft.Network/virtualNetworks', variables('vnetName'))]", 447 | "[resourceId('Microsoft.Network/virtualNetworks/subnets', variables('vnetName'), variables('vmSubnetName'))]", 448 | "[resourceId('Microsoft.Network/networkSecurityGroups', variables('nsgName'))]", 449 | "[resourceId('Microsoft.Network/publicIPAddresses', variables('vmPublicIpAddressName'))]", 450 | "[resourceId('Microsoft.Network/networkInterfaces', variables('vmNetworkInterfaceName'))]" 451 | ] 452 | }, 453 | { 454 | "condition": "[and(and(equals(variables('vmSecurityProfileJson').securityType, 'TrustedLaunch'), variables('vmSecurityProfileJson').uefiSettings.secureBootEnabled), variables('vmSecurityProfileJson').uefiSettings.vTpmEnabled)]", 455 | "type": "Microsoft.Compute/virtualMachines/extensions", 456 | "apiVersion": "2023-09-01", 457 | "name": "[format('{0}/{1}', parameters('vmName'), variables('VmExtensionName'))]", 458 | "location": "[resourceGroup().location]", 459 | "properties": { 460 | "publisher": "[variables('vmExtensionPublisher')]", 461 | "type": "[variables('vmExtensionName')]", 462 | "typeHandlerVersion": "[variables('vmExtensionVersion')]", 463 | "autoUpgradeMinorVersion": true, 464 | "enableAutomaticUpgrade": true, 465 | "settings": { 466 | "AttestationConfig": { 467 | 
"MaaSettings": { 468 | "maaEndpoint": "[variables('vmMaaEndpoint')]", 469 | "maaTenantName": "[variables('vmMaaTenantName')]" 470 | } 471 | } 472 | } 473 | }, 474 | "dependsOn": [ 475 | "[resourceId('Microsoft.Compute/virtualMachines', parameters('vmName'))]" 476 | ] 477 | } 478 | ] 479 | } -------------------------------------------------------------------------------- /artifacts/templates/deploy/variables_template.yaml: -------------------------------------------------------------------------------- 1 | SUBSCRIPTION_ID: "" 2 | LOCATION: "" 3 | RESOURCE_GROUP: "" 4 | KEYVAULT_NAME: "" 5 | STORAGEACCOUNT_NAME: "" 6 | AIO_SERVICE_PRINCIPAL: "" 7 | AIO_MANAGED_IDENTITY_SECRETS: "" 8 | AIO_MANAGED_IDENTITY_COMPONENTS: "" 9 | AIO_SCHEMA_REGISTRY_NAMESPACE: "" 10 | AIO_CLUSTER_NAME: "" 11 | EVENTHUB_NAMESPACE: "" 12 | EVENTHUB_NAME: "" 13 | FACTORY_AGENT_SERVICE_PRINCIPAL: "" 14 | AZURE_OPENAI_NAME: "" -------------------------------------------------------------------------------- /artifacts/templates/fabric/reference-datasets/assets.csv: -------------------------------------------------------------------------------- 1 | Cell,MaintenanceStatus,SerialNumber,MaintenanceDate 2 | 7H27L4,Done,SDL4OYV5,01/09/2024 3 | NVIIRV,Done,HHXW9A9C,02/09/2024 4 | 834C8X,Done,4CLC29B9,03/09/2024 5 | BI2BI0,Done,J9WA8QJF,04/09/2024 6 | 53FCIU,Done,49EIQXR,05/09/2024 7 | 0DKBDE,Done,AFKFW3PD,06/09/2024 8 | DR2FB3,Done,TXI5GVBL,07/09/2024 9 | PIBIDT,Done,OFPAYVJV,08/09/2024 10 | 9OMOSD,Done,X5KRYSV1,09/09/2024 11 | 9VR7RO,Done,6M9HPI0F,10/09/2024 12 | RZ8BO8,Done,8UXUWE53,11/09/2024 13 | X78TFV,Overdue,ZDCJ35BB,01/06/2024 14 | EJAOET,Overdue,IZVN2FKH,02/06/2024 15 | 48H0LP,Overdue,G479VVF3,03/06/2024 16 | SDZI4E,Overdue,KXQ5UR9D,04/06/2024 17 | LP0HNU,Upcoming,KXN5WJG5,01/01/2025 18 | 07RIRW,Upcoming,JWS7BIH8,02/01/2025 19 | IMHAO0,Upcoming,ET9NLL3C,03/01/2025 20 | MI9VEP,Upcoming,NBY997MH,04/01/2025 21 | 0CYBFQ,Upcoming,YFXFW2PP,05/01/2025 22 | LFR8PP,Upcoming,TT2W87EV,06/01/2025 23 | 
I1IX2I,Upcoming,14MF5WGL,07/01/2025 24 | 82TFFU,Upcoming,XWJU57IY,08/01/2025 25 | JKTB4G,Upcoming,MT0G03RP,09/01/2025 26 | MHB89D,Upcoming,4VKV412K,10/01/2025 -------------------------------------------------------------------------------- /artifacts/templates/fabric/reference-datasets/operators.csv: -------------------------------------------------------------------------------- 1 | EmployeeId,Operator,OperatorPhone,OperatorEmail 2 | 12153730,Steve Harris,+1 512 1234567,steve.harris@lightningcars.com 3 | 14388871,Linda Johnson,+1 512 2345678,linda.johnson@lightningcars.com 4 | 15864874,Mark Williams,+1 512 3456789,mark.williams@lightningcars.com 5 | 21067389,Sarah Miller,+1 716 4567890,sarah.miller@lightningcars.com 6 | 39198113,John Smith,+1 716 5678901,john.smith@lightningcars.com 7 | 52621674,Karen Brown,+1 716 6789012,karen.brown@lightningcars.com 8 | 55179229,Robert Jones,+49 30 7890123,robert.jones@lightningcars.com 9 | 59398151,Emily Davis,+49 30 8901234,emily.davis@lightningcars.com 10 | 61244427,Michael Wilson,+49 30 9012345,michael.wilson@lightningcars.com 11 | 62041664,Jessica Taylor,+49 30 1234568,jessica.taylor@lightningcars.com 12 | 65751693,David Anderson,+49 30 2345679,david.anderson@lightningcars.com 13 | 72440646,Emma Thomas,+49 30 3456780,emma.thomas@lightningcars.com 14 | 74091564,Daniel Martinez,+86 21 4567891,daniel.martinez@lightningcars.com 15 | 90159413,Mary Garcia,+86 21 5678902,mary.garcia@lightningcars.com 16 | 93757252,James Martinez,+86 21 6789013,james.martinez@lightningcars.com -------------------------------------------------------------------------------- /artifacts/templates/fabric/reference-datasets/products.csv: -------------------------------------------------------------------------------- 1 | Cell,ProductId,ProductName 2 | 7H27L4,12850046,"Car Model 1" 3 | LP0HNU,12850046,"Car Model 1" 4 | X78TFV,12850046,"Car Model 1" 5 | 07RIRW,12850046,"Car Model 1" 6 | NVIIRV,12850046,"Car Model 1" 7 | 834C8X,12850046,"Car Model 1" 8 | 
IMHAO0,12850046,"Car Model 1" 9 | EJAOET,12850046,"Car Model 1" 10 | MI9VEP,12850046,"Car Model 1" 11 | BI2BI0,12850046,"Car Model 1" 12 | PIBIDT,63182614,"Car Model 2" 13 | I1IX2I,63182614,"Car Model 2" 14 | 48H0LP,63182614,"Car Model 2" 15 | 82TFFU,63182614,"Car Model 2" 16 | 9OMOSD,63182614,"Car Model 2" 17 | 9VR7RO,80176078,"Truck Model 1" 18 | JKTB4G,80176078,"Truck Model 1" 19 | SDZI4E,80176078,"Truck Model 1" 20 | MHB89D,80176078,"Truck Model 1" 21 | RZ8BO8,80176078,"Truck Model 1" 22 | 53FCIU,95216290,"Battery Model 1" 23 | 0CYBFQ,95216290,"Battery Model 1" 24 | 0DKBDE,95216290,"Battery Model 1" 25 | LFR8PP,95216290,"Battery Model 1" 26 | DR2FB3,95216290,"Battery Model 1" -------------------------------------------------------------------------------- /artifacts/templates/k3s/flux/mqtt/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../pods/mqtt-client/pod.yaml -------------------------------------------------------------------------------- /artifacts/templates/k3s/flux/simulator/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../pods/simulator/deployment.yaml 5 | - ../../pods/simulator/configuration.yaml -------------------------------------------------------------------------------- /artifacts/templates/k3s/pods/mqtt-client/pod.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: mqtt-client 5 | namespace: azure-iot-operations 6 | spec: 7 | containers: 8 | - image: alpine 9 | name: mqtt-client 10 | command: ["sh", "-c"] 11 | args: ["apk add mosquitto-clients mqttui && sleep infinity"] -------------------------------------------------------------------------------- 
/artifacts/templates/k3s/pods/opc-plc/opc-plc-deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: opc-plc-000000 5 | namespace: azure-iot-operations 6 | labels: 7 | app.kubernetes.io/component: opcplc-000000 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app.kubernetes.io/component: opcplc-000000 13 | template: 14 | metadata: 15 | labels: 16 | app.kubernetes.io/component: opcplc-000000 17 | spec: 18 | containers: 19 | - name: opc-plc 20 | image: mcr.microsoft.com/iotedge/opc-plc:latest 21 | args: 22 | - "--ph=opcplc-000000" 23 | - "--cdn=opcplc-000000" 24 | - "--ut" 25 | - "--sph" 26 | - "--sn=5" 27 | - "--sr=10" 28 | - "--fn=10" 29 | - "--ftl=212" 30 | - "--ftu=273" 31 | - "--ftr=True" 32 | - "--veryfastrate=1000" 33 | - "--gn=1" 34 | - "--pn=50000" 35 | - "--at=FlatDirectory" 36 | - "--drurs" 37 | ports: 38 | - containerPort: 50000 39 | volumeMounts: 40 | - name: opc-plc-default-application-cert 41 | mountPath: /app/pki/own 42 | - name: opc-plc-trust-list 43 | mountPath: /app/pki/trusted 44 | volumes: 45 | - name: opc-plc-default-application-cert 46 | secret: 47 | secretName: opc-plc-default-application-cert 48 | - name: opc-plc-trust-list 49 | secret: 50 | secretName: opc-plc-trust-list 51 | serviceAccountName: opcplc-000000-service-account 52 | --- 53 | apiVersion: v1 54 | kind: Service 55 | metadata: 56 | name: opcplc-000000 57 | namespace: azure-iot-operations 58 | labels: 59 | app.kubernetes.io/component: opcplc-000000 60 | spec: 61 | type: ClusterIP 62 | selector: 63 | app.kubernetes.io/component: opcplc-000000 64 | ports: 65 | - port: 50000 66 | protocol: TCP 67 | targetPort: 50000 68 | --- 69 | apiVersion: cert-manager.io/v1 70 | kind: Issuer 71 | metadata: 72 | name: opc-plc-self-signed-issuer 73 | namespace: azure-iot-operations 74 | labels: 75 | app.kubernetes.io/component: opcplc-000000 76 | spec: 77 | selfSigned: {} 78 | 
--- 79 | apiVersion: cert-manager.io/v1 80 | kind: Certificate 81 | metadata: 82 | name: opc-plc-default-application-cert 83 | namespace: azure-iot-operations 84 | labels: 85 | app.kubernetes.io/component: opcplc-000000 86 | spec: 87 | secretName: opc-plc-default-application-cert 88 | duration: 2160h # 90d 89 | renewBefore: 360h # 15d 90 | issuerRef: 91 | name: opc-plc-self-signed-issuer 92 | kind: Issuer 93 | commonName: OpcPlc 94 | dnsNames: 95 | - opcplc-000000 96 | - opcplc-000000.azure-iot-operations.svc.cluster.local 97 | - opcplc-000000.azure-iot-operations 98 | uris: 99 | - urn:OpcPlc:opcplc-000000 100 | usages: 101 | - digital signature 102 | - key encipherment 103 | - data encipherment 104 | - server auth 105 | - client auth 106 | privateKey: 107 | algorithm: RSA 108 | size: 2048 109 | encodeUsagesInRequest: true 110 | isCA: false 111 | --- 112 | apiVersion: v1 113 | kind: Secret 114 | metadata: 115 | name: opc-plc-trust-list 116 | namespace: azure-iot-operations 117 | labels: 118 | app.kubernetes.io/component: opcplc-000000 119 | data: {} 120 | --- 121 | apiVersion: batch/v1 122 | kind: Job 123 | metadata: 124 | name: opcplc-000000-execute-mutual-trust 125 | namespace: azure-iot-operations 126 | labels: 127 | app.kubernetes.io/component: opcplc-000000 128 | spec: 129 | backoffLimit: 1 130 | template: 131 | spec: 132 | containers: 133 | - name: kubectl 134 | image: mcr.microsoft.com/oss/kubernetes/kubectl:v1.27.1 135 | imagePullPolicy: Always 136 | command: ["/bin/sh"] 137 | args: ["/scripts/execute-commands.sh"] 138 | volumeMounts: 139 | - name: scripts 140 | mountPath: /scripts 141 | readOnly: true 142 | restartPolicy: Never 143 | serviceAccountName: opcplc-000000-service-account 144 | volumes: 145 | - name: scripts 146 | configMap: 147 | name: opcplc-000000-execute-commands-script 148 | --- 149 | apiVersion: v1 150 | kind: ConfigMap 151 | metadata: 152 | name: opcplc-000000-execute-commands-script 153 | namespace: azure-iot-operations 154 | labels: 155 
| app.kubernetes.io/component: opcplc-000000 156 | data: 157 | execute-commands.sh: | 158 | #!/bin/sh 159 | 160 | # wait 20 seconds for the resources to be created 161 | sleep 20 162 | 163 | # Extract the OPC UA connector application instance certificate and add it to the OPC PLC trust list 164 | cert=$(kubectl -n azure-iot-operations get secret aio-opc-opcuabroker-default-application-cert -o jsonpath='{.data.tls\.crt}' | base64 -d) 165 | data=$(kubectl create secret generic temp --from-literal=opcuabroker.crt="$cert" --dry-run=client -o jsonpath='{.data}') 166 | kubectl patch secret opc-plc-trust-list -n azure-iot-operations -p "{\"data\": $data}" 167 | 168 | # Extract the OPC PLC application instance certificate and add it to the OPC UA connector trust list 169 | cert=$(kubectl -n azure-iot-operations get secret opc-plc-default-application-cert -o jsonpath='{.data.tls\.crt}' | base64 -d) 170 | data=$(kubectl create secret generic temp --from-literal=opcplc-000000.crt="$cert" --dry-run=client -o jsonpath='{.data}') 171 | kubectl patch secret aio-opc-ua-broker-trust-list -n azure-iot-operations -p "{\"data\": $data}" 172 | --- 173 | apiVersion: v1 174 | kind: ServiceAccount 175 | metadata: 176 | name: opcplc-000000-service-account 177 | namespace: azure-iot-operations 178 | labels: 179 | app.kubernetes.io/component: opcplc-000000 180 | --- 181 | apiVersion: rbac.authorization.k8s.io/v1 182 | kind: Role 183 | metadata: 184 | name: opc-plc-000000-secret-access-role 185 | namespace: azure-iot-operations 186 | rules: 187 | - apiGroups: [""] 188 | resources: ["secrets"] 189 | verbs: ["get", "patch"] 190 | --- 191 | apiVersion: rbac.authorization.k8s.io/v1 192 | kind: RoleBinding 193 | metadata: 194 | name: opc-plc-000000-secret-access-rolebinding 195 | namespace: azure-iot-operations 196 | subjects: 197 | - kind: ServiceAccount 198 | name: opcplc-000000-service-account 199 | namespace: azure-iot-operations 200 | roleRef: 201 | kind: Role 202 | name: 
opc-plc-000000-secret-access-role 203 | apiGroup: rbac.authorization.k8s.io 204 | -------------------------------------------------------------------------------- /artifacts/templates/k3s/pods/simulator/configuration.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: mqtt-data-simulator-config 5 | namespace: azure-iot-operations 6 | data: 7 | config.json: | 8 | { 9 | "mqtt_broker": { 10 | "address": "aio-broker-insecure", 11 | "port": 1883 12 | }, 13 | "root_topic": "LightningCars", 14 | "publish_interval": 1, 15 | "topics": [ 16 | { 17 | "topics": [ 18 | "Austin/Stamping/Line1/PIBIDT", 19 | "Austin/Welding/Line1/I1IX2I", 20 | "Austin/Painting/Line1/48H0LP", 21 | "Austin/Assembly/Line1/82TFFU", 22 | "Austin/Inspection/Line1/9OMOSD" 23 | ], 24 | "tags": [ 25 | { "tag": "Latitude", "value": 30.2672, "type": "constant" }, 26 | { "tag": "Longitude", "value": -97.7431, "type": "constant" }, 27 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 28 | { "tag": "OperatingTime", "type": "int", "min_value": 60000, "max_value": 64000, "increment_step": 500, "update_interval": 3600 }, 29 | { "tag": "TotalPartsCount", "type": "int", "min_value": 800, "max_value": 1200, "update_interval": 7200 }, 30 | { "tag": "GoodPartsCount", "type": "int", "min_value": 500, "max_value": 900, "update_interval": 7200 }, 31 | { "tag": "IdealCycleTime", "value": 52.0, "type": "constant" }, 32 | { "tag": "EnergyConsumption", "mean": 10.0, "deviation": 0.5, "type": "float" }, 33 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 34 | { "tag": "Shift", "type": "int", "min_value": 1, "max_value": 3, "increment_step": 1, "update_interval": 28800 }, 35 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 36 | ] 37 | }, 38 | { 39 | "topics": [ 40 | "Buffalo/Stamping/Line1/9VR7RO", 41 | "Buffalo/Welding/Line1/JKTB4G", 42 | 
"Buffalo/Painting/Line1/SDZI4E", 43 | "Buffalo/Assembly/Line1/MHB89D", 44 | "Buffalo/Inspection/Line1/RZ8BO8" 45 | ], 46 | "tags": [ 47 | { "tag": "Latitude", "value": 42.8864, "type": "constant" }, 48 | { "tag": "Longitude", "value": -78.8784, "type": "constant" }, 49 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 50 | { "tag": "OperatingTime", "type": "int", "min_value": 59000, "max_value": 63000, "increment_step": 500, "update_interval": 3600 }, 51 | { "tag": "TotalPartsCount", "type": "int", "min_value": 700, "max_value": 900, "update_interval": 7200 }, 52 | { "tag": "GoodPartsCount", "type": "int", "min_value": 500, "max_value": 800, "update_interval": 7200 }, 53 | { "tag": "IdealCycleTime", "value": 55.0, "type": "constant" }, 54 | { "tag": "EnergyConsumption", "mean": 10.0, "deviation": 0.5, "type": "float" }, 55 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 56 | { "tag": "Shift", "type": "int", "min_value": 4, "max_value": 6, "increment_step": 1, "update_interval": 28800 }, 57 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 58 | ] 59 | }, 60 | { 61 | "topics": [ 62 | "Berlin/Stamping/Line1/7H27L4", 63 | "Berlin/Welding/Line1/LP0HNU", 64 | "Berlin/Painting/Line1/X78TFV", 65 | "Berlin/Assembly/Line1/07RIRW", 66 | "Berlin/Inspection/Line1/NVIIRV" 67 | ], 68 | "tags": [ 69 | { "tag": "Latitude", "value": 52.5200, "type": "constant" }, 70 | { "tag": "Longitude", "value": 13.4050, "type": "constant" }, 71 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 72 | { "tag": "OperatingTime", "type": "int", "min_value": 69000, "max_value": 71000, "increment_step": 500, "update_interval": 3600 }, 73 | { "tag": "TotalPartsCount", "type": "int", "min_value": 1400, "max_value": 1600, "update_interval": 7200 }, 74 | { "tag": "GoodPartsCount", "type": "int", "min_value": 1350, "max_value": 1550, "update_interval": 7200 }, 75 | { "tag": "IdealCycleTime", "value": 35.0, "type": 
"constant" }, 76 | { "tag": "EnergyConsumption", "mean": 12.0, "deviation": 0.5, "type": "float" }, 77 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 78 | { "tag": "Shift", "type": "int", "min_value": 7, "max_value": 9, "increment_step": 1, "update_interval": 28800 }, 79 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 80 | ] 81 | }, 82 | { 83 | "topics": [ 84 | "Berlin/Stamping/Line2/834C8X", 85 | "Berlin/Welding/Line2/IMHAO0", 86 | "Berlin/Painting/Line2/EJAOET", 87 | "Berlin/Assembly/Line2/MI9VEP", 88 | "Berlin/Inspection/Line2/BI2BI0" 89 | ], 90 | "tags": [ 91 | { "tag": "Latitude", "value": 52.5200, "type": "constant" }, 92 | { "tag": "Longitude", "value": 13.4050, "type": "constant" }, 93 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 94 | { "tag": "OperatingTime", "type": "int", "min_value": 69000, "max_value": 71000, "increment_step": 500, "update_interval": 3600 }, 95 | { "tag": "TotalPartsCount", "type": "int", "min_value": 1400, "max_value": 1600, "update_interval": 7200 }, 96 | { "tag": "GoodPartsCount", "type": "int", "min_value": 1350, "max_value": 1550, "update_interval": 7200 }, 97 | { "tag": "IdealCycleTime", "value": 35.0, "type": "constant" }, 98 | { "tag": "EnergyConsumption", "mean": 12.0, "deviation": 0.5, "type": "float" }, 99 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 100 | { "tag": "Shift", "type": "int", "min_value": 10, "max_value": 12, "increment_step": 1, "update_interval": 28800 }, 101 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 102 | ] 103 | }, 104 | { 105 | "topics": [ 106 | "Shanghai/Mixing/Line1/53FCIU", 107 | "Shanghai/Coating/Line1/0CYBFQ", 108 | "Shanghai/Drying/Line1/0DKBDE", 109 | "Shanghai/Assembly/Line1/LFR8PP", 110 | "Shanghai/Inspection/Line1/DR2FB3" 111 | ], 112 | "tags": [ 113 | { "tag": "Latitude", "value": 31.2304, "type": "constant" }, 114 | { "tag": "Longitude", "value": 121.4737, 
"type": "constant" }, 115 | { "tag": "PlannedProductionTime", "value": 72000, "type": "constant" }, 116 | { "tag": "OperatingTime", "type": "int", "min_value": 68000, "max_value": 70000, "increment_step": 500, "update_interval": 3600 }, 117 | { "tag": "TotalPartsCount", "type": "int", "min_value": 1200, "max_value": 1500, "update_interval": 7200 }, 118 | { "tag": "GoodPartsCount", "type": "int", "min_value": 1150, "max_value": 1450, "update_interval": 7200 }, 119 | { "tag": "IdealCycleTime", "value": 38.0, "type": "constant" }, 120 | { "tag": "EnergyConsumption", "mean": 11.0, "deviation": 0.5, "type": "float" }, 121 | { "tag": "Temperature", "mean": 85.0, "deviation": 3.0, "type": "float" }, 122 | { "tag": "Shift", "type": "int", "min_value": 13, "max_value": 15, "increment_step": 1, "update_interval": 28800 }, 123 | { "tag": "Downtime", "mean": 15, "deviation": 2, "type": "int" } 124 | ] 125 | } 126 | ] 127 | } -------------------------------------------------------------------------------- /artifacts/templates/k3s/pods/simulator/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: mqtt-data-simulator 5 | namespace: azure-iot-operations 6 | spec: 7 | replicas: 1 8 | selector: 9 | matchLabels: 10 | app: mqtt-data-simulator 11 | template: 12 | metadata: 13 | labels: 14 | app: mqtt-data-simulator 15 | spec: 16 | containers: 17 | - name: mqtt-data-simulator 18 | image: ghcr.io/chriscrcodes/mqtt-data-simulator:latest 19 | imagePullPolicy: Always 20 | volumeMounts: 21 | - name: config-volume 22 | mountPath: /app/config.json 23 | subPath: config.json 24 | volumes: 25 | - name: config-volume 26 | configMap: 27 | name: mqtt-data-simulator-config --------------------------------------------------------------------------------