├── .github └── FUNDING.yml ├── .gitignore ├── LICENSE ├── README.md ├── doc ├── Architecture-Overview.png ├── Automation-Gateway.png └── yaml-json-schema.json ├── docker ├── Dockerfile ├── SIEMENS-Industrial-Edge │ ├── config-simatic-panel.yaml │ ├── docker-compose.yml │ └── icononly_transparent.png ├── build.bat ├── build.sh ├── push.sh ├── remove.bat ├── remove.sh ├── setver.sh └── version.txt ├── native ├── .gitignore ├── Dockerfile ├── Dockerfile.build ├── Dockerfile.run ├── README.txt ├── build-config.sh ├── build-native.bat ├── build-native.sh ├── build-source.sh ├── config-example-plc4x.yaml ├── config-example.yaml ├── config-plc4x │ ├── jni-config.json │ ├── predefined-classes-config.json │ ├── proxy-config.json │ ├── reflect-config.json │ ├── resource-config.json │ └── serialization-config.json ├── config │ ├── jni-config.json │ ├── predefined-classes-config.json │ ├── proxy-config.json │ ├── reflect-config.json │ ├── resource-config.json │ └── serialization-config.json ├── logging.properties ├── security │ └── rocworks-gateway.pfx └── source │ └── .gitignore └── source ├── .gitattributes ├── .gitignore ├── app-plc4x ├── .gitignore ├── build.gradle ├── config-alltesting.yaml ├── config-example.yaml └── src │ └── main │ ├── kotlin │ └── App.kt │ └── resources │ ├── logging.properties │ └── simplelogger.properties ├── app ├── .gitignore ├── build.gradle ├── config-example.yaml ├── configs │ ├── config-demo-1.yaml │ ├── config-demo-2.yaml │ ├── config-demo-3.yaml │ ├── config-demo-4.yaml │ ├── config-demo-5.yaml │ ├── config-mass-influx.yaml │ ├── config-mass-iotdb.yaml │ ├── config-milo-kafka.yaml │ ├── config-mqtt-opcua.yaml │ ├── config-mqtt-spb-decoder.yaml │ ├── config-mqtt-spb-influx.yaml │ ├── config-opcua-aggregator.yaml │ ├── config-opcua-mqtt.yaml │ ├── config-opcua-neo4j.yaml │ ├── config-spb-opcua.yaml │ ├── config-test-influx.yaml │ ├── config-test-jdbc.yaml │ ├── config-test-kafka.yaml │ ├── config-test-loggers.yaml │ ├── config-test-mqtt.yaml │ ├── config-test-neo4j.yaml │ ├── config-test-opensearch.yaml │ ├── config-test-questdb.yaml │ └── config-test-zenoh.yaml └── src │ └── main │ ├── kotlin │ └── App.kt │ └── resources │ ├── logging.properties │ └── simplelogger.properties ├── build.gradle ├── buildSrc ├── build.gradle └── src │ └── main │ └── groovy │ ├── gateway.kotlin-application-conventions.gradle │ ├── gateway.kotlin-common-conventions.gradle │ └── gateway.kotlin-library-conventions.gradle ├── gradle-docker.sh ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── lib-core ├── build.gradle └── src │ └── main │ ├── java │ └── at │ │ └── rocworks │ │ └── gateway │ │ └── core │ │ ├── data │ │ ├── CodecDataPoint.java │ │ ├── CodecTopic.java │ │ ├── CodecTopicValue.java │ │ └── GenericCodec.java │ │ └── opcua │ │ ├── HostnameUtil.java │ │ └── KeyStoreLoader.java │ └── kotlin │ └── at │ └── rocworks │ └── gateway │ ├── core │ ├── data │ │ ├── DataPoint.kt │ │ ├── EventBus.kt │ │ ├── Topic.kt │ │ ├── TopicStatus.kt │ │ └── TopicValue.kt │ ├── driver │ │ ├── DriverBase.kt │ │ ├── MonitoredItem.kt │ │ └── Registry.kt │ ├── graphql │ │ ├── ConfigServer.kt │ │ └── GraphQLServer.kt │ ├── logger │ │ ├── ILoggerQueue.kt │ │ ├── LoggerBase.kt │ │ ├── LoggerPublisher.kt │ │ ├── LoggerQueueDisk.kt │ │ └── LoggerQueueMemory.kt │ ├── mqtt │ │ ├── MqttDriver.kt │ │ ├── MqttMonitoredItem.kt │ │ ├── MqttServer.kt │ │ └── MqttServerEndpoint.kt │ ├── opcua │ │ ├── OpcUaDriver.kt │ │ ├── OpcUaServer.kt │ │ ├── driver │ │ │ ├── 
OpcUaConfig.kt │ │ │ └── OpcUaMonitoredItem.kt │ │ └── server │ │ │ ├── OpcUaGatewayNodes.kt │ │ │ ├── OpcUaNamespace.kt │ │ │ ├── OpcUaSampledNode.kt │ │ │ ├── OpcUaSampledSpace.kt │ │ │ ├── OpcUaServerInstance.kt │ │ │ └── OpcUaServerLimits.kt │ └── service │ │ ├── Common.kt │ │ ├── Component.kt │ │ ├── ComponentHandler.kt │ │ ├── ComponentLogger.kt │ │ ├── ServiceHandler.kt │ │ └── WebConfig.kt │ └── logger │ ├── ImplyLogger.kt │ ├── JdbcLogger.kt │ ├── KafkaLogger.kt │ └── MqttLogger.kt ├── lib-influxdb ├── build.gradle └── src │ └── main │ └── kotlin │ └── at │ └── rocworks │ └── gateway │ └── logger │ └── influx │ ├── InfluxDBLogger.kt │ ├── InfluxDBLoggerV1.kt │ └── InfluxDBLoggerV2.kt ├── lib-iotdb ├── build.gradle └── src │ └── main │ └── kotlin │ └── at │ └── rocworks │ └── gateway │ └── logger │ └── iotdb │ └── IoTDBLogger.kt ├── lib-neo4j ├── build.gradle └── src │ └── main │ └── kotlin │ └── at │ └── rocworks │ └── gateway │ └── logger │ └── neo4j │ └── Neo4jLogger.kt ├── lib-opensearch ├── build.gradle └── src │ └── main │ └── kotlin │ └── at │ └── rocworks │ └── gateway │ └── logger │ └── opensearch │ └── OpenSearchLogger.kt ├── lib-plc4x ├── build.gradle └── src │ └── main │ ├── kotlin │ ├── Plc4xDriver.kt │ └── Plc4xMonitoredItem.kt │ └── resources │ └── logging.properties ├── lib-questdb ├── build.gradle └── src │ └── main │ └── kotlin │ └── at │ └── rocworks │ └── gateway │ └── logger │ └── questdb │ └── QuestDBLogger.kt ├── settings.gradle └── test ├── build.gradle └── src └── main ├── kotlin ├── Test.kt └── TestTopics.kt └── resources └── logging.properties /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: vogler75 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log.? 
6 | *.lck 7 | 8 | # BlueJ files 9 | *.ctxt 10 | 11 | # Mobile Tools for Java (J2ME) 12 | .mtj.tmp/ 13 | 14 | # Package Files # 15 | *.jar 16 | *.war 17 | *.nar 18 | *.ear 19 | *.zip 20 | *.tar.gz 21 | *.rar 22 | 23 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 24 | hs_err_pid* 25 | 26 | # Mac 27 | .DS_Store 28 | 29 | # Idea 30 | *.swp 31 | .idea 32 | /target 33 | 34 | .vscode -------------------------------------------------------------------------------- /doc/Architecture-Overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vogler75/automation-gateway/1f8b7fa2129cdf28651d9c9a540999b9cfd34096/doc/Architecture-Overview.png -------------------------------------------------------------------------------- /doc/Automation-Gateway.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vogler75/automation-gateway/1f8b7fa2129cdf28651d9c9a540999b9cfd34096/doc/Automation-Gateway.png -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | # Amazon Corretto 2 | #FROM amazoncorretto:17-alpine 3 | 4 | # libstdc++ is needed by QuestDB 5 | #RUN apk add libstdc++ gcompat libc6-compat 6 | 7 | # Eclipse-Temurin also has the listdc++ which is needed by QuestDB 8 | #FROM eclipse-temurin:21 9 | 10 | # Debian 11 | FROM debian:bookworm-slim 12 | 13 | # Set environment variables for non-interactive installations 14 | ENV DEBIAN_FRONTEND=noninteractive 15 | 16 | # Install OpenJDK and clean up 17 | RUN apt-get update && \ 18 | apt-get install -y --no-install-recommends openjdk-17-jdk && \ 19 | apt-get clean && \ 20 | rm -rf /var/lib/apt/lists/* 21 | 22 | # Set JAVA_HOME environment variable 23 | ENV JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64 24 | ENV PATH="$JAVA_HOME/bin:${PATH}" 25 | 26 | # Verify Java installation 27 | RUN java -version 28 | 29 | ARG APP_NAME=app 30 | ADD app.tar / 31 | RUN test "$APP_NAME" = "app" || mv /${APP_NAME} /app 32 | RUN echo "/app/bin/${APP_NAME}" > /run.sh && chmod 755 /run.sh 33 | WORKDIR /app 34 | CMD exec /run.sh 35 | -------------------------------------------------------------------------------- /docker/SIEMENS-Industrial-Edge/config-simatic-panel.yaml: -------------------------------------------------------------------------------- 1 | Drivers: 2 | OpcUa: 3 | - Id: "panel" 4 | Enabled: true 5 | LogLevel: INFO 6 | EndpointUrl: "opc.tcp://172.17.1.1:4890" 7 | UpdateEndpointUrl: true 8 | SecurityPolicy: None 9 | KeepAliveFailuresAllowed: 0 10 | UsernameProvider: 11 | Username: username1 12 | Password: password1 13 | AddressCache: 14 | MaximumSize: 1000 15 | SubscriptionSamplingInterval: 0.0 16 | 17 | Servers: 18 | GraphQL: 19 | - Id: graphql 20 | Enabled: true 21 | LogLevel: INFO 22 | Port: 4000 23 | 24 | Mqtt: 25 | - Id: mqtt 26 | Enabled: true 27 | LogLevel: INFO 28 | Port: 1883 29 | 30 | Loggers: 31 | Mqtt: 32 | - Id: mqtt1 33 | Enabled: true 34 | Host: 192.168.1.30 35 | Format: Raw 36 | Port: 1883 37 | Logging: 38 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Tags/# 39 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Structure instances/# 40 | 41 | Kafka: 42 | - Id: kafka1 43 | Enabled: false 44 | Servers: 192.168.1.30:9092 45 | TopicName: frankenstein 46 | Format: Raw 47 | Logging: 48 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Tags/# 49 | - 
Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Structure instances/# 50 | 51 | Jdbc: 52 | - Id: timescale1 53 | Enabled: false 54 | Url: jdbc:postgresql://192.168.1.30:5432/postgres 55 | Username: system 56 | Password: manager 57 | Logging: 58 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Tags/# 59 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Structure instances/# 60 | 61 | InfluxDB: 62 | - Id: influxdb1 63 | Enabled: false 64 | LogLevel: INFO 65 | Url: http://192.168.1.30:8086 66 | Database: frankenstein 67 | Logging: 68 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Tags/# 69 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Structure instances/# 70 | 71 | IoTDB: 72 | - Id: iotdb1 73 | Enabled: false 74 | Host: 192.168.1.30 75 | Port: 6667 76 | Database: root.frankenstein 77 | Username: "root" 78 | Password: "root" 79 | Logging: 80 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Tags/# 81 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Structure instances/# 82 | 83 | OpenSearch: 84 | - Id: opensearch1 85 | Enabled: false 86 | Host: 192.168.1.30 87 | Port: 9200 88 | Index: frankenstein 89 | Logging: 90 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Tags/# 91 | - Topic: opc/panel/path/Objects/HmiRuntime/HMI_RT_2/Structure instances/# 92 | -------------------------------------------------------------------------------- /docker/SIEMENS-Industrial-Edge/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2.4' 2 | services: 3 | frankenstein: 4 | environment: 5 | GATEWAY_CONFIG: /cfg-data/config-simatic-panel.yaml 6 | GATEWAY_CONFIG_RETRY: 5 7 | GATEWAY_CONFIG_HTTP: 80 8 | ports: 9 | - '30000:80' 10 | - '30001:1883' 11 | - '30002:4000' 12 | restart: unless-stopped 13 | image: 'rocworks/automation-gateway:1.34' 14 | hostname: frankenstein 15 | volumes: 16 | - './cfg-data/:/cfg-data/' 17 | - './security/:/app/security/' 18 | - './publish/:/publish/' 19 | mem_limit: 512mb 20 | -------------------------------------------------------------------------------- /docker/SIEMENS-Industrial-Edge/icononly_transparent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vogler75/automation-gateway/1f8b7fa2129cdf28651d9c9a540999b9cfd34096/docker/SIEMENS-Industrial-Edge/icononly_transparent.png -------------------------------------------------------------------------------- /docker/build.bat: -------------------------------------------------------------------------------- 1 | copy ..\source\app\build\distributions\app.tar app.tar 2 | docker build --build-arg APP_NAME=app -t frankenstein-app . 3 | del app.tar 4 | 5 | copy ..\source\cluster\gateway\build\distributions\gateway.tar app.tar 6 | docker build --build-arg APP_NAME=gateway -t frankenstein-gateway . 7 | del app.tar 8 | 9 | copy ..\source\cluster\opcua\build\distributions\opcua.tar app.tar 10 | docker build --build-arg APP_NAME=opcua -t frankenstein-opcua . 11 | del app.tar 12 | 13 | copy ..\source\cluster\plc4x\build\distributions\plc4x.tar app.tar 14 | docker build --build-arg APP_NAME=plc4x -t frankenstein-plc4x . 15 | del app.tar 16 | 17 | copy ..\source\cluster\cache\build\distributions\cache.tar app.tar 18 | docker build --build-arg APP_NAME=cache -t frankenstein-cache . 19 | del app.tar 20 | 21 | copy ..\source\cluster\influxdb\build\distributions\influxdb.tar app.tar 22 | docker build --build-arg APP_NAME=influxdb -t frankenstein-influxdb . 
del app.tar

copy ..\source\cluster\iotdb\build\distributions\iotdb.tar app.tar
docker build --build-arg APP_NAME=iotdb -t frankenstein-iotdb .
del app.tar

copy ..\source\cluster\kafka\build\distributions\kafka.tar app.tar
docker build --build-arg APP_NAME=kafka -t frankenstein-kafka .
del app.tar
--------------------------------------------------------------------------------
/docker/build.sh:
--------------------------------------------------------------------------------
#!/bin/bash

build() {
  app=${1:-app}
  sub=${2:-.}
  ver=${3:-latest}

  echo $app $sub $ver

  if [ $sub = "." ]; then
    name="rocworks/automation-gateway:$ver"
    app_with_path=../source/$app/build/distributions/$app.tar
  else
    app=$app-$sub
    name="rocworks/automation-gateway:$ver-$sub"
    app_with_path=../source/$app/build/distributions/$app.tar
  fi

  if [ -f $app_with_path ]; then
    echo $app
    cp $app_with_path ./app.tar
    docker build --build-arg APP_NAME=$app -t $name .
    rm ./app.tar
  else
    echo "Please build the app ${app_with_path} with gradle first!"
  fi
}

ver=${2:-`cat version.txt`}

build app . $ver
build app plc4x $ver
--------------------------------------------------------------------------------
/docker/push.sh:
--------------------------------------------------------------------------------
version=`cat version.txt`

docker push rocworks/automation-gateway:${version}
#docker push rocworks/automation-gateway:latest

docker push rocworks/automation-gateway:${version}-plc4x
#docker push rocworks/automation-gateway:latest-plc4x
--------------------------------------------------------------------------------
/docker/remove.bat:
--------------------------------------------------------------------------------
docker rmi frankenstein-app
docker rmi frankenstein-gateway
docker rmi frankenstein-opcua
docker rmi frankenstein-plc4x
docker rmi frankenstein-dds
docker rmi frankenstein-cache
docker rmi frankenstein-influxdb
docker rmi frankenstein-iotdb
docker rmi frankenstein-kafka
--------------------------------------------------------------------------------
/docker/remove.sh:
--------------------------------------------------------------------------------
docker rmi frankenstein-app
docker rmi frankenstein-gateway
docker rmi frankenstein-opcua
docker rmi frankenstein-plc4x
docker rmi frankenstein-dds
docker rmi frankenstein-cache
docker rmi frankenstein-influxdb
docker rmi frankenstein-iotdb
docker rmi frankenstein-kafka
--------------------------------------------------------------------------------
/docker/setver.sh:
--------------------------------------------------------------------------------
#!/bin/bash
v=`cat version.txt`
version="${1:-$v}"
if [ $version = "none" ]; then
  echo "usage $0 <version>"
else
  echo "tag version $version"
  docker tag rocworks/automation-gateway:latest rocworks/automation-gateway:$version
  docker tag rocworks/automation-gateway:latest-plc4x rocworks/automation-gateway:${version}-plc4x
fi
--------------------------------------------------------------------------------
/docker/version.txt:
--------------------------------------------------------------------------------
1.37.1
--------------------------------------------------------------------------------
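A minimal end-to-end sketch of how the files in docker/ fit together (an editorial example, not a file in the repository): build.sh packs the Gradle distribution into app.tar and tags the image, and the container is then started with a mounted configuration directory, mirroring the SIEMENS-Industrial-Edge docker-compose.yml. The host paths and published ports below are placeholders.

cd docker
./build.sh        # packs ../source/app/build/distributions/app.tar (and the -plc4x variant) and tags rocworks/automation-gateway:<version from version.txt>
# Run the image with a local config directory mounted; GATEWAY_CONFIG points at the YAML inside the container,
# as in docker-compose.yml. 1883 is the MQTT server and 4000 the GraphQL server from the example configs.
docker run -d --name automation-gateway \
    -e GATEWAY_CONFIG=/cfg-data/config.yaml \
    -v "$PWD/cfg-data:/cfg-data" \
    -p 1883:1883 -p 4000:4000 \
    rocworks/automation-gateway:1.37.1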
/native/.gitignore:
--------------------------------------------------------------------------------
app
config.yaml
*.exe
*.dll
app.build_artifacts.txt
classpath.txt
--------------------------------------------------------------------------------
/native/Dockerfile:
--------------------------------------------------------------------------------
# Use the smallest Debian base image
FROM debian:bookworm-slim

# Install any essential dependencies your "app" needs (if any)
# Example:
# RUN apt-get update && apt-get install -y \
#     libssl-dev \
#     # ... other dependencies ... \
#     && rm -rf /var/lib/apt/lists/*

# Create a directory for your application
WORKDIR /app

# Copy your application executable into the container
COPY app .
COPY logging.properties .

# Make your application executable (if necessary)
RUN chmod +x /app/app

# Set the entrypoint to directly run your application
ENTRYPOINT ["/app/app"]
--------------------------------------------------------------------------------
/native/Dockerfile.build:
--------------------------------------------------------------------------------
docker build -t frankenstein .
--------------------------------------------------------------------------------
/native/Dockerfile.run:
--------------------------------------------------------------------------------
docker run -ti --rm -v $PWD/config.yaml:/app/config.yaml frankenstein
--------------------------------------------------------------------------------
/native/README.txt:
--------------------------------------------------------------------------------
######################
graalvm-jdk-17.0.8+9.1
openjdk 17.0.8.1 2023-08-22 LTS
######################

######################
# Linux
######################
sudo apt-get install build-essential libz-dev zlib1g-dev
sudo yum install zlib-devel

######################
# Windows
######################
Download and install the Visual Studio Build Tools:
https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools&rel=16

Check the
* Desktop development with C++ box in the main window.
* On the right side under Installation Details, choose Windows 10 SDK
Click the Install button.

After the installation completes, reboot your system.

On Windows, the native-image builder will only work when it's executed from the x64 Native Tools Command Prompt
> "C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools\VC\Auxiliary\Build\vcvars64.bat"
or
> "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvars64.bat"

######################
# Build
######################
> build-source.sh # build and copy the java distribution of the app to source/ and get the classpath
> build-config.sh # start the java distribution with the agent to collect the GraalVM configs
> build-native.sh # build the native image with GraalVM

######################
# Notes
######################

Lin: tar xf ../source/app/build/distributions/app.tar -C source
Win: powershell -command "Expand-Archive ../source/app/build/distributions/app.zip source"

Set JAVA_OPTS="-agentlib:native-image-agent=config-merge-dir=config" to enable the agent.
45 | > set JAVA_OPTS=-agentlib:native-image-agent=config-merge-dir=config 46 | > export JAVA_OPTS=-agentlib:native-image-agent=config-merge-dir=config 47 | 48 | Copy the CLASSPATH from app\bin\app to the build.sh script! 49 | -------------------------------------------------------------------------------- /native/build-config.sh: -------------------------------------------------------------------------------- 1 | export JAVA_OPTS=-agentlib:native-image-agent=config-merge-dir=config$1 2 | ./source/app$1/bin/app$1 3 | -------------------------------------------------------------------------------- /native/build-native.bat: -------------------------------------------------------------------------------- 1 | echo "Unzip app.zip to here." 2 | set APP_HOME=app 3 | 4 | echo "Copy CLASSPATH from app\bin\app.bat!" 5 | set CLASSPATH=%APP_HOME%\lib\app.jar;%APP_HOME%\lib\lib-influxdb.jar;%APP_HOME%\lib\lib-jdbc.jar;%APP_HOME%\lib\lib-kafka.jar;%APP_HOME%\lib\lib-iotdb.jar;%APP_HOME%\lib\lib-core.jar;%APP_HOME%\lib\vertx-lang-kotlin-4.4.6.jar;%APP_HOME%\lib\influxdb-java-2.21.jar;%APP_HOME%\lib\logging-interceptor-4.9.0.jar;%APP_HOME%\lib\kotlin-stdlib-jdk8-1.8.22.jar;%APP_HOME%\lib\vertx-config-yaml-4.4.6.jar;%APP_HOME%\lib\vertx-service-discovery-4.4.6.jar;%APP_HOME%\lib\vertx-mqtt-4.4.6.jar;%APP_HOME%\lib\vertx-web-graphql-4.4.6.jar;%APP_HOME%\lib\vertx-kafka-client-4.4.6.jar;%APP_HOME%\lib\vertx-config-4.4.6.jar;%APP_HOME%\lib\vertx-web-4.4.6.jar;%APP_HOME%\lib\vertx-web-common-4.4.6.jar;%APP_HOME%\lib\vertx-auth-common-4.4.6.jar;%APP_HOME%\lib\vertx-bridge-common-4.4.6.jar;%APP_HOME%\lib\vertx-core-4.4.6.jar;%APP_HOME%\lib\postgresql-42.2.27.jar;%APP_HOME%\lib\mysql-connector-java-8.0.28.jar;%APP_HOME%\lib\crate-jdbc-2.6.0.jar;%APP_HOME%\lib\kotlin-stdlib-jdk7-1.8.22.jar;%APP_HOME%\lib\converter-moshi-2.9.0.jar;%APP_HOME%\lib\retrofit-2.9.0.jar;%APP_HOME%\lib\okhttp-4.9.0.jar;%APP_HOME%\lib\okio-jvm-2.8.0.jar;%APP_HOME%\lib\kotlin-stdlib-1.8.22.jar;%APP_HOME%\lib\kotlin-stdlib-common-1.8.22.jar;%APP_HOME%\lib\dictionary-reader-0.6.3.jar;%APP_HOME%\lib\sdk-client-0.6.11.jar;%APP_HOME%\lib\rxjava-2.2.21.jar;%APP_HOME%\lib\tahu-core-1.0.5.jar;%APP_HOME%\lib\slf4j-simple-1.7.32.jar;%APP_HOME%\lib\iotdb-session-1.2.2.jar;%APP_HOME%\lib\waffle-jna-1.7.5.jar;%APP_HOME%\lib\jcl-over-slf4j-1.7.12.jar;%APP_HOME%\lib\graphql-java-20.4.jar;%APP_HOME%\lib\isession-1.2.2.jar;%APP_HOME%\lib\service-rpc-1.2.2.jar;%APP_HOME%\lib\iotdb-thrift-1.2.2.jar;%APP_HOME%\lib\iotdb-thrift-commons-1.2.2.jar;%APP_HOME%\lib\logback-classic-1.2.11.jar;%APP_HOME%\lib\kafka-clients-3.5.0.jar;%APP_HOME%\lib\stack-client-0.6.11.jar;%APP_HOME%\lib\sdk-core-0.6.11.jar;%APP_HOME%\lib\bsd-parser-0.6.3.jar;%APP_HOME%\lib\bsd-core-0.6.3.jar;%APP_HOME%\lib\stack-core-0.6.11.jar;%APP_HOME%\lib\java-dataloader-3.2.0.jar;%APP_HOME%\lib\libthrift-0.14.1.jar;%APP_HOME%\lib\slf4j-api-2.0.7.jar;%APP_HOME%\lib\netty-handler-proxy-4.1.100.Final.jar;%APP_HOME%\lib\netty-codec-http2-4.1.100.Final.jar;%APP_HOME%\lib\netty-codec-http-4.1.100.Final.jar;%APP_HOME%\lib\netty-resolver-dns-4.1.100.Final.jar;%APP_HOME%\lib\netty-handler-4.1.100.Final.jar;%APP_HOME%\lib\netty-codec-mqtt-4.1.100.Final.jar;%APP_HOME%\lib\netty-transport-native-unix-common-4.1.100.Final.jar;%APP_HOME%\lib\netty-codec-socks-4.1.100.Final.jar;%APP_HOME%\lib\netty-codec-dns-4.1.100.Final.jar;%APP_HOME%\lib\netty-codec-4.1.100.Final.jar;%APP_HOME%\lib\netty-transport-4.1.100.Final.jar;%APP_HOME%\lib\netty-buffer-4.1.100.Final.jar;%APP_HOME%\lib\netty-resolver-4.1.100.Final.jar;%APP
_HOME%\lib\netty-common-4.1.100.Final.jar;%APP_HOME%\lib\jackson-databind-2.15.0.jar;%APP_HOME%\lib\jackson-annotations-2.15.0.jar;%APP_HOME%\lib\jackson-core-2.15.0.jar;%APP_HOME%\lib\snakeyaml-2.0.jar;%APP_HOME%\lib\guava-31.0.1-jre.jar;%APP_HOME%\lib\checker-qual-3.12.0.jar;%APP_HOME%\lib\protobuf-java-3.16.3.jar;%APP_HOME%\lib\jna-platform-4.2.1.jar;%APP_HOME%\lib\jna-4.2.1.jar;%APP_HOME%\lib\org.osgi.enterprise-4.2.0.jar;%APP_HOME%\lib\org.osgi.core-4.3.1.jar;%APP_HOME%\lib\client-1.0.0-beta.2.jar;%APP_HOME%\lib\annotations-13.0.jar;%APP_HOME%\lib\reactive-streams-1.0.3.jar;%APP_HOME%\lib\commons-compress-1.21.jar;%APP_HOME%\lib\org.eclipse.paho.client.mqttv3-1.2.5.jar;%APP_HOME%\lib\commons-io-2.11.0.jar;%APP_HOME%\lib\logback-core-1.2.11.jar;%APP_HOME%\lib\msgpack-core-0.8.21.jar;%APP_HOME%\lib\commons-jexl3-3.2.1.jar;%APP_HOME%\lib\fastdoubleparser-0.8.0.jar;%APP_HOME%\lib\common-1.0.0-beta.2.jar;%APP_HOME%\lib\netty-channel-fsm-0.8.jar;%APP_HOME%\lib\jaxb-runtime-2.3.6.jar;%APP_HOME%\lib\jakarta.activation-1.2.2.jar;%APP_HOME%\lib\moshi-1.8.0.jar;%APP_HOME%\lib\zstd-jni-1.5.5-1.jar;%APP_HOME%\lib\lz4-java-1.8.0.jar;%APP_HOME%\lib\snappy-java-1.1.10.0.jar;%APP_HOME%\lib\tsfile-1.2.2.jar;%APP_HOME%\lib\slice-0.41.jar;%APP_HOME%\lib\jol-core-0.2.jar;%APP_HOME%\lib\commons-logging-1.2.jar;%APP_HOME%\lib\bcpkix-jdk18on-1.75.jar;%APP_HOME%\lib\bcutil-jdk18on-1.75.jar;%APP_HOME%\lib\bcprov-jdk18on-1.75.jar;%APP_HOME%\lib\strict-machine-0.6.jar;%APP_HOME%\lib\failureaccess-1.0.1.jar;%APP_HOME%\lib\listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar;%APP_HOME%\lib\jsr305-3.0.2.jar;%APP_HOME%\lib\error_prone_annotations-2.7.1.jar;%APP_HOME%\lib\j2objc-annotations-1.3.jar;%APP_HOME%\lib\jakarta.xml.bind-api-2.3.3.jar;%APP_HOME%\lib\txw2-2.3.6.jar;%APP_HOME%\lib\istack-commons-runtime-3.0.12.jar;%APP_HOME%\lib\javax.annotation-api-1.3.2.jar 6 | 7 | native-image --no-fallback -H:IncludeResources='.*/.*properties$' ^ 8 | -H:ReflectionConfigurationFiles=config/reflect-config.json ^ 9 | -H:DynamicProxyConfigurationFiles=config/proxy-config.json ^ 10 | --initialize-at-build-time=org.slf4j ^ 11 | --initialize-at-run-time=io.netty.util.internal.logging.Log4JLogger ^ 12 | --initialize-at-run-time=io.netty.handler.ssl.JettyNpnSslEngine ^ 13 | --initialize-at-run-time=io.netty.handler.ssl.ConscryptAlpnSslEngine ^ 14 | --initialize-at-run-time=io.netty.handler.ssl.JdkNpnApplicationProtocolNegotiator ^ 15 | --initialize-at-run-time=io.netty.handler.ssl.OpenSslAsyncPrivateKeyMethod ^ 16 | --initialize-at-run-time=io.netty.handler.ssl.OpenSslPrivateKeyMethod ^ 17 | --initialize-at-run-time=io.netty.handler.ssl.ReferenceCountedOpenSslEngine ^ 18 | --initialize-at-run-time=io.netty.handler.ssl.BouncyCastleAlpnSslUtils ^ 19 | --initialize-at-run-time=io.netty.handler.codec.compression.BrotliOptions ^ 20 | --initialize-at-run-time=io.netty.internal.tcnative.AsyncSSLPrivateKeyMethod ^ 21 | --initialize-at-run-time=io.netty.internal.tcnative.CertificateVerifier ^ 22 | --initialize-at-run-time=io.netty.internal.tcnative.SSL ^ 23 | --initialize-at-run-time=io.netty.internal.tcnative.SSLPrivateKeyMethod ^ 24 | --initialize-at-run-time=io.netty.internal.tcnative.CertificateCompressionAlgo ^ 25 | -cp %CLASSPATH% App 26 | 27 | -------------------------------------------------------------------------------- /native/build-native.sh: -------------------------------------------------------------------------------- 1 | export APP_HOME=source/app$1 2 | 3 | CLASSPATH=`cat classpath.txt | envsubst` 4 | echo 
$CLASSPATH 5 | 6 | native-image -march=compatibility --no-fallback -H:IncludeResources='.*/.*properties$' \ 7 | -H:ReflectionConfigurationFiles=config$1/reflect-config.json \ 8 | -H:DynamicProxyConfigurationFiles=config$1/proxy-config.json \ 9 | --initialize-at-build-time=org.slf4j \ 10 | --initialize-at-run-time=io.netty.util.internal.logging.Log4JLogger \ 11 | --initialize-at-run-time=io.netty.handler.ssl.JettyNpnSslEngine \ 12 | --initialize-at-run-time=io.netty.handler.ssl.ConscryptAlpnSslEngine \ 13 | --initialize-at-run-time=io.netty.handler.ssl.JdkNpnApplicationProtocolNegotiator \ 14 | --initialize-at-run-time=io.netty.handler.ssl.OpenSslAsyncPrivateKeyMethod \ 15 | --initialize-at-run-time=io.netty.handler.ssl.OpenSslPrivateKeyMethod \ 16 | --initialize-at-run-time=io.netty.handler.ssl.ReferenceCountedOpenSslEngine \ 17 | --initialize-at-run-time=io.netty.handler.ssl.BouncyCastleAlpnSslUtils \ 18 | --initialize-at-run-time=io.netty.handler.codec.compression.BrotliOptions \ 19 | --initialize-at-run-time=io.netty.internal.tcnative.AsyncSSLPrivateKeyMethod \ 20 | --initialize-at-run-time=io.netty.internal.tcnative.CertificateVerifier \ 21 | --initialize-at-run-time=io.netty.internal.tcnative.SSL \ 22 | --initialize-at-run-time=io.netty.internal.tcnative.SSLPrivateKeyMethod \ 23 | --initialize-at-run-time=io.netty.internal.tcnative.CertificateCompressionAlgo \ 24 | -cp $CLASSPATH App 25 | 26 | -------------------------------------------------------------------------------- /native/build-source.sh: -------------------------------------------------------------------------------- 1 | echo "Build..." 2 | cd ../source 3 | ./gradlew build 4 | 5 | echo "Unpack..." 6 | cd ../native 7 | rm -rf source/app$1 8 | tar xf ../source/app$1/build/distributions/app$1.tar -C source 9 | echo `cat source/app$1/bin/app$1 | grep "^CLASSPATH" | sed "s/^CLASSPATH=//"` > classpath.txt 10 | 11 | echo "Ready." 
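Taken together, the scripts above implement the workflow from README.txt. A condensed sketch of the full sequence on Linux (an editorial example, not a repository file; it assumes GraalVM's native-image and the platform build tools are installed):

cd native
./build-source.sh     # gradle build, unpack app.tar into source/ and write the CLASSPATH to classpath.txt
./build-config.sh     # run the unpacked JVM app with the native-image agent to merge configs into config/
./build-native.sh     # build the native executable from classpath.txt and the collected configs
# Each script accepts an optional suffix, e.g. "./build-source.sh -plc4x", to use app-plc4x and config-plc4x instead.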
12 | -------------------------------------------------------------------------------- /native/config-plc4x/jni-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name":"[Lcom.sun.management.internal.DiagnosticCommandArgumentInfo;" 4 | }, 5 | { 6 | "name":"[Lcom.sun.management.internal.DiagnosticCommandInfo;" 7 | }, 8 | { 9 | "name":"com.sun.management.internal.DiagnosticCommandArgumentInfo", 10 | "methods":[{"name":"","parameterTypes":["java.lang.String","java.lang.String","java.lang.String","java.lang.String","boolean","boolean","boolean","int"] }] 11 | }, 12 | { 13 | "name":"com.sun.management.internal.DiagnosticCommandInfo", 14 | "methods":[{"name":"","parameterTypes":["java.lang.String","java.lang.String","java.lang.String","java.lang.String","java.lang.String","java.lang.String","boolean","java.util.List"] }] 15 | }, 16 | { 17 | "name":"java.lang.Boolean", 18 | "methods":[{"name":"getBoolean","parameterTypes":["java.lang.String"] }] 19 | }, 20 | { 21 | "name":"java.lang.ClassLoader", 22 | "methods":[{"name":"getPlatformClassLoader","parameterTypes":[] }, {"name":"loadClass","parameterTypes":["java.lang.String"] }] 23 | }, 24 | { 25 | "name":"java.util.Arrays", 26 | "methods":[{"name":"asList","parameterTypes":["java.lang.Object[]"] }] 27 | }, 28 | { 29 | "name":"jdk.internal.loader.ClassLoaders$PlatformClassLoader" 30 | }, 31 | { 32 | "name":"org.graalvm.jniutils.JNIExceptionWrapperEntryPoints", 33 | "methods":[{"name":"getClassName","parameterTypes":["java.lang.Class"] }] 34 | }, 35 | { 36 | "name":"sun.management.VMManagementImpl", 37 | "fields":[{"name":"compTimeMonitoringSupport"}, {"name":"currentThreadCpuTimeSupport"}, {"name":"objectMonitorUsageSupport"}, {"name":"otherThreadCpuTimeSupport"}, {"name":"remoteDiagnosticCommandsSupport"}, {"name":"synchronizerUsageSupport"}, {"name":"threadAllocatedMemorySupport"}, {"name":"threadContentionMonitoringSupport"}] 38 | } 39 | ] 40 | -------------------------------------------------------------------------------- /native/config-plc4x/predefined-classes-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type":"agent-extracted", 4 | "classes":[ 5 | ] 6 | } 7 | ] 8 | 9 | -------------------------------------------------------------------------------- /native/config-plc4x/proxy-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "interfaces":["org.apache.iotdb.service.rpc.thrift.IClientRPCService$Iface"] 4 | }, 5 | { 6 | "interfaces":["org.influxdb.impl.InfluxDBService"] 7 | } 8 | ] 9 | -------------------------------------------------------------------------------- /native/config-plc4x/resource-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources":{ 3 | "includes":[{ 4 | "pattern":"\\QMETA-INF/MANIFEST.MF\\E" 5 | }, { 6 | "pattern":"\\QMETA-INF/services/io.vertx.config.spi.ConfigProcessor\\E" 7 | }, { 8 | "pattern":"\\QMETA-INF/services/io.vertx.config.spi.ConfigStoreFactory\\E" 9 | }, { 10 | "pattern":"\\QMETA-INF/services/io.vertx.core.spi.VertxServiceProvider\\E" 11 | }, { 12 | "pattern":"\\QMETA-INF/services/org.apache.plc4x.java.api.PlcDriver\\E" 13 | }, { 14 | "pattern":"\\QMETA-INF/services/org.apache.plc4x.java.spi.transport.Transport\\E" 15 | }, { 16 | "pattern":"\\Qkafka/kafka-version.properties\\E" 17 | }, { 18 | "pattern":"\\Qlogback.xml\\E" 19 | }, { 20 | 
"pattern":"\\Qlogging.properties\\E" 21 | }, { 22 | "pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E" 23 | }, { 24 | "pattern":"\\Qsimplelogger.properties\\E" 25 | }, { 26 | "pattern":"java.base:\\Qjdk/internal/icu/impl/data/icudt67b/nfkc.nrm\\E" 27 | }, { 28 | "pattern":"java.base:\\Qjdk/internal/icu/impl/data/icudt67b/uprops.icu\\E" 29 | }, { 30 | "pattern":"java.base:\\Qsun/net/idn/uidna.spp\\E" 31 | }, { 32 | "pattern":"java.base:\\Qsun/text/resources/nfkc.icu\\E" 33 | }, { 34 | "pattern":"java.base:\\Qsun/text/resources/uprops.icu\\E" 35 | }]}, 36 | "bundles":[{ 37 | "name":"i18n.Parsing", 38 | "locales":["und"] 39 | }] 40 | } 41 | -------------------------------------------------------------------------------- /native/config-plc4x/serialization-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "types":[ 3 | ], 4 | "lambdaCapturingTypes":[ 5 | ], 6 | "proxies":[ 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /native/config/jni-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name":"[Lcom.sun.management.internal.DiagnosticCommandArgumentInfo;" 4 | }, 5 | { 6 | "name":"[Lcom.sun.management.internal.DiagnosticCommandInfo;" 7 | }, 8 | { 9 | "name":"com.sun.management.internal.DiagnosticCommandArgumentInfo", 10 | "methods":[{"name":"","parameterTypes":["java.lang.String","java.lang.String","java.lang.String","java.lang.String","boolean","boolean","boolean","int"] }] 11 | }, 12 | { 13 | "name":"com.sun.management.internal.DiagnosticCommandInfo", 14 | "methods":[{"name":"","parameterTypes":["java.lang.String","java.lang.String","java.lang.String","java.lang.String","java.lang.String","java.lang.String","boolean","java.util.List"] }] 15 | }, 16 | { 17 | "name":"java.lang.Boolean", 18 | "methods":[{"name":"getBoolean","parameterTypes":["java.lang.String"] }] 19 | }, 20 | { 21 | "name":"java.lang.ClassLoader", 22 | "methods":[{"name":"getPlatformClassLoader","parameterTypes":[] }, {"name":"loadClass","parameterTypes":["java.lang.String"] }] 23 | }, 24 | { 25 | "name":"java.util.Arrays", 26 | "methods":[{"name":"asList","parameterTypes":["java.lang.Object[]"] }] 27 | }, 28 | { 29 | "name":"jdk.internal.loader.ClassLoaders$PlatformClassLoader" 30 | }, 31 | { 32 | "name":"org.graalvm.jniutils.JNIExceptionWrapperEntryPoints", 33 | "methods":[{"name":"getClassName","parameterTypes":["java.lang.Class"] }] 34 | }, 35 | { 36 | "name":"sun.management.VMManagementImpl", 37 | "fields":[{"name":"compTimeMonitoringSupport"}, {"name":"currentThreadCpuTimeSupport"}, {"name":"objectMonitorUsageSupport"}, {"name":"otherThreadCpuTimeSupport"}, {"name":"remoteDiagnosticCommandsSupport"}, {"name":"synchronizerUsageSupport"}, {"name":"threadAllocatedMemorySupport"}, {"name":"threadContentionMonitoringSupport"}] 38 | } 39 | ] 40 | -------------------------------------------------------------------------------- /native/config/predefined-classes-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type":"agent-extracted", 4 | "classes":[ 5 | ] 6 | } 7 | ] 8 | 9 | -------------------------------------------------------------------------------- /native/config/proxy-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "interfaces":["org.apache.iotdb.service.rpc.thrift.IClientRPCService$Iface"] 4 | }, 5 | { 6 | 
"interfaces":["org.influxdb.impl.InfluxDBService"] 7 | } 8 | ] 9 | -------------------------------------------------------------------------------- /native/config/resource-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "resources":{ 3 | "includes":[{ 4 | "pattern":"\\QMETA-INF/MANIFEST.MF\\E" 5 | }, { 6 | "pattern":"\\QMETA-INF/services/io.vertx.config.spi.ConfigProcessor\\E" 7 | }, { 8 | "pattern":"\\QMETA-INF/services/io.vertx.config.spi.ConfigStoreFactory\\E" 9 | }, { 10 | "pattern":"\\QMETA-INF/services/io.vertx.core.spi.VertxServiceProvider\\E" 11 | }, { 12 | "pattern":"\\QMETA-INF/services/jakarta.json.spi.JsonProvider\\E" 13 | }, { 14 | "pattern":"\\QMETA-INF/services/java.sql.Driver\\E" 15 | }, { 16 | "pattern":"\\Qkafka/kafka-version.properties\\E" 17 | }, { 18 | "pattern":"\\Qlogback.xml\\E" 19 | }, { 20 | "pattern":"\\Qlogging.properties\\E" 21 | }, { 22 | "pattern":"\\Qmozilla/public-suffix-list.txt\\E" 23 | }, { 24 | "pattern":"\\Qorg/apache/http/nio/client/version.properties\\E" 25 | }, { 26 | "pattern":"\\Qorg/opensearch/client/version.properties\\E" 27 | }, { 28 | "pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E" 29 | }, { 30 | "pattern":"\\Qsimplelogger.properties\\E" 31 | }, { 32 | "pattern":"java.base:\\Qjdk/internal/icu/impl/data/icudt67b/nfkc.nrm\\E" 33 | }, { 34 | "pattern":"java.base:\\Qjdk/internal/icu/impl/data/icudt67b/uprops.icu\\E" 35 | }, { 36 | "pattern":"java.base:\\Qsun/net/idn/uidna.spp\\E" 37 | }, { 38 | "pattern":"java.base:\\Qsun/text/resources/nfkc.icu\\E" 39 | }, { 40 | "pattern":"java.base:\\Qsun/text/resources/uprops.icu\\E" 41 | }, { 42 | "pattern":"java.xml:\\Qcom/sun/org/apache/xml/internal/serializer/Encodings.properties\\E" 43 | }]}, 44 | "bundles":[{ 45 | "name":"com.sun.org.apache.xml.internal.serializer.XMLEntities", 46 | "locales":["", "und"] 47 | }, { 48 | "name":"i18n.Parsing", 49 | "locales":["und"] 50 | }] 51 | } 52 | -------------------------------------------------------------------------------- /native/config/serialization-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "types":[ 3 | { 4 | "name":"java.util.HashMap" 5 | }, 6 | { 7 | "name":"org.apache.http.impl.auth.BasicScheme" 8 | }, 9 | { 10 | "name":"org.apache.http.impl.auth.RFC2617Scheme" 11 | } 12 | ], 13 | "lambdaCapturingTypes":[ 14 | ], 15 | "proxies":[ 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /native/logging.properties: -------------------------------------------------------------------------------- 1 | handlers = java.util.logging.ConsoleHandler 2 | #handlers = java.util.logging.ConsoleHandler, java.util.logging.FileHandler 3 | 4 | java.util.logging.ConsoleHandler.level = ALL 5 | java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter 6 | 7 | java.util.logging.FileHandler.level = ALL 8 | java.util.logging.FileHandler.pattern = gateway.log.%g 9 | java.util.logging.FileHandler.limit = 50000 10 | java.util.logging.FileHandler.count = 1 11 | java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter 12 | 13 | java.util.logging.SimpleFormatter.format=[%1$tF %1$tT][%4$-7s][%3$-30.30s] %5$s %n%6$s 14 | -------------------------------------------------------------------------------- /native/security/rocworks-gateway.pfx: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/vogler75/automation-gateway/1f8b7fa2129cdf28651d9c9a540999b9cfd34096/native/security/rocworks-gateway.pfx
--------------------------------------------------------------------------------
/native/source/.gitignore:
--------------------------------------------------------------------------------
app
app-plc4x
--------------------------------------------------------------------------------
/source/.gitattributes:
--------------------------------------------------------------------------------
#
# https://help.github.com/articles/dealing-with-line-endings/
#
# These are explicitly windows files and should use crlf
*.bat text eol=crlf
--------------------------------------------------------------------------------
/source/.gitignore:
--------------------------------------------------------------------------------
# Ignore Gradle project-specific cache directory
.gradle

# Ignore Gradle build output directory
build

# Ignore Certificate Directory
security

# Add Gradle Wrapper Exception
!gradle/wrapper/gradle-wrapper.jar

# Bin Folder
bin
--------------------------------------------------------------------------------
/source/app-plc4x/.gitignore:
--------------------------------------------------------------------------------
config.yaml
--------------------------------------------------------------------------------
/source/app-plc4x/build.gradle:
--------------------------------------------------------------------------------
plugins {
    id 'gateway.kotlin-application-conventions'
}

dependencies {
    implementation project(':lib-core')
    implementation project(':lib-plc4x')

    implementation project(':lib-influxdb')
    implementation project(':lib-iotdb')
    implementation project(':lib-neo4j')
    implementation project(':lib-opensearch')

    implementation "io.vertx:vertx-core:$vertxVersion"
    implementation "io.vertx:vertx-lang-kotlin:$vertxVersion"
    implementation "io.vertx:vertx-config-yaml:$vertxVersion"
    implementation "io.vertx:vertx-service-discovery:$vertxVersion"

    implementation group: 'org.apache.plc4x', name: 'plc4j-api', version: '0.9.0'
    runtimeOnly group: 'org.apache.plc4x', name: 'plc4j-driver-s7', version: '0.9.0'
    runtimeOnly group: 'org.apache.plc4x', name: 'plc4j-driver-modbus', version: '0.9.0'
    runtimeOnly group: 'org.apache.plc4x', name: 'plc4j-driver-simulated', version: '0.9.0'

    runtimeOnly group: 'org.postgresql', name: 'postgresql', version: '42.2.27'
    runtimeOnly group: 'org.hsqldb', name: 'hsqldb', version: '2.7.2'

    //runtimeOnly group: 'mysql', name: 'mysql-connector-java', version: '8.0.28'
    //runtimeOnly group: 'io.crate', name: 'crate-jdbc', version: '2.6.0'
    //runtimeOnly group: 'com.microsoft.sqlserver', name: 'mssql-jdbc', version: '10.2.0.jre11'
}

application {
    // Define the main class for the application.
34 | mainClass = 'App' 35 | } 36 | -------------------------------------------------------------------------------- /source/app-plc4x/config-alltesting.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Enabled: true 4 | LogLevel: INFO 5 | Port: 1883 6 | Host: 0.0.0.0 7 | MaxMessageSizeKb: 8192 8 | 9 | OpcUa: 10 | - Enabled: true 11 | Port: 4841 12 | LogLevel: INFO 13 | Topics: 14 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Float 15 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_String 16 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 17 | - Topic: mqtt/mqtt_json_driver/path/json/# 18 | - Topic: mqtt/mqtt_spb_driver/path/spBv1.0/# 19 | - Topic: plc/modbus_driver/node/holding-register:1:INT 20 | 21 | GraphQL: 22 | - Port: 4001 23 | Enabled: true 24 | LogLevel: INFO 25 | 26 | Drivers: 27 | Mqtt: 28 | - Id: "mqtt_json_driver" 29 | Enabled: true 30 | LogLevel: INFO 31 | Host: linux0.rocworks.local 32 | Port: 1883 33 | Format: Json 34 | CustomJson: 35 | Value: "Value" 36 | TimestampMs: "TimeMS" 37 | TimestampIso: "TimeISO" 38 | 39 | - Id: "mqtt_spb_driver" 40 | Enabled: true 41 | LogLevel: INFO 42 | Host: linux0.rocworks.local 43 | Port: 1883 44 | Format: SparkplugB 45 | 46 | OpcUa: 47 | - Id: "opcua_driver" 48 | Enabled: true 49 | LogLevel: INFO 50 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 51 | UpdateEndpointUrl: true 52 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 53 | 54 | Plc4x: 55 | - Id: "modbus_driver" 56 | Enabled: true 57 | LogLevel: INFO 58 | Polling: 59 | Time: 500 60 | Url: "modbus://localhost:502" 61 | 62 | Loggers: 63 | Mqtt: 64 | - Id: "spb_logger" 65 | Enabled: true 66 | LogLevel: INFO 67 | Host: linux0.rocworks.local 68 | Port: 1883 69 | Topic: spb_logger 70 | Format: SparkplugB 71 | Logging: 72 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Float 73 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_String 74 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 75 | - Topic: mqtt/mqtt_json_driver/path/json_input/# 76 | - Topic: plc/modbus_driver/node/holding-register:1:INT 77 | 78 | - Id: "spb_logger_bulk" 79 | Enabled: true 80 | LogLevel: INFO 81 | Host: linux0.rocworks.local 82 | Port: 1883 83 | Topic: spb_logger_bulk 84 | Format: SparkplugB 85 | BulkMessages: true 86 | Logging: 87 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 88 | 89 | - Id: "mqtt_logger" 90 | Enabled: true 91 | LogLevel: INFO 92 | Host: linux0.rocworks.local 93 | Port: 1883 94 | Topic: mqtt_logger 95 | Format: Json 96 | BulkMessages: false 97 | MaxMessageSizeKb: 1024 98 | Logging: 99 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Float 100 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_String 101 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 102 | Target: mqtt_logger/opcua_driver/Target/# 103 | - Topic: mqtt/mqtt_json_driver/path/json_input/# 104 | - Topic: mqtt/mqtt_spb_driver/path/spb_logger/# 105 | - Topic: mqtt/mqtt_spb_driver/path/spBv1.0/# 106 | - Topic: plc/modbus_driver/node/holding-register:1:INT 107 | 108 | Kafka: 109 | - Id: "kafka_logger" 110 | Enabled: true 111 | LogLevel: INFO 112 | Servers: linux0.rocworks.local:9092 113 | Configs: 114 | batch.size: 25000 115 | 
enable.auto.commit: true 116 | auto.commit.interval.ms: 1000 117 | TopicName: gateway 118 | Format: Json 119 | Logging: 120 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Float 121 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_String 122 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 123 | - Topic: mqtt/mqtt_json_driver/path/json_input/# 124 | - Topic: mqtt/mqtt_spb_driver/path/spb_logger/# 125 | - Topic: mqtt/mqtt_spb_driver/path/spBv1.0/# 126 | - Topic: plc/modbus_driver/node/holding-register:1:INT 127 | 128 | InfluxDB: 129 | - Id: "influxdb_logger" 130 | Enabled: true 131 | Url: http://linux0.rocworks.local:8086 132 | Database: gateway 133 | Logging: 134 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Float 135 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_String 136 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 137 | - Topic: mqtt/mqtt_json_driver/path/json_input/# 138 | - Topic: mqtt/mqtt_spb_driver/path/spb_logger/# 139 | - Topic: mqtt/mqtt_spb_driver/path/spBv1.0/# 140 | - Topic: plc/modbus_driver/node/holding-register:1:INT 141 | 142 | IoTDB: 143 | - Id: "iotdb_logger" 144 | Enabled: true 145 | Host: linux0.rocworks.local 146 | Port: 6667 147 | Database: root.gateway 148 | Username: "root" 149 | Password: "root" 150 | Logging: 151 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Float 152 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_String 153 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 154 | - Topic: mqtt/mqtt_json_driver/path/json_input/# 155 | - Topic: mqtt/mqtt_spb_driver/path/spb_logger/# 156 | - Topic: mqtt/mqtt_spb_driver/path/spBv1.0/# 157 | - Topic: plc/modbus_driver/node/holding-register:1:INT 158 | 159 | Neo4j: 160 | - Id: "neo4j_logger" 161 | Enabled: true 162 | Url: bolt://linux0.rocworks.local:7687 163 | Username: "neo4j" 164 | Password: "manager123$" 165 | Schemas: 166 | - System: opcua_driver 167 | Logging: 168 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Float 169 | - Topic: opc/opcua_driver/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_String 170 | - Topic: opc/opcua_driver/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 171 | - Topic: mqtt/mqtt_json_driver/path/json_input/# 172 | - Topic: mqtt/mqtt_spb_driver/path/spb_logger/# 173 | - Topic: mqtt/mqtt_spb_driver/path/spBv1.0/# 174 | - Topic: plc/modbus_driver/node/holding-register:1:INT 175 | -------------------------------------------------------------------------------- /source/app-plc4x/config-example.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | GraphQL: 3 | - Port: 4000 4 | LogLevel: INFO 5 | GraphiQL: true 6 | 7 | Mqtt: 8 | - Port: 1883 9 | Host: 0.0.0.0 10 | Enabled: true 11 | LogLevel: INFO # ALL | INFO 12 | 13 | OpcUa: 14 | - Port: 4842 15 | Enabled: true 16 | LogLevel: INFO 17 | Topics: 18 | - Topic: plc/demo/node/holding-register:1:INT 19 | - Topic: plc/demo/node/holding-register:2:INT 20 | - Topic: plc/demo/node/holding-register:3:INT 21 | 22 | Drivers: 23 | Plc4x: 24 | - Id: "demo" 25 | Enabled: true 26 | Url: "modbus://localhost:502" 27 | Polling: 28 | Time: 100 29 | OldNew: true 30 | WriteTimeout: 100 31 | ReadTimeout: 100 32 | LogLevel: INFO 33 | 34 | Loggers: 35 | Mqtt: 36 | 
    - Id: mqtt1
      Enabled: true
      Host: linux0.rocworks.local
      Port: 1883
      Ssl: false
      Topic: modbus
      Format: Raw
      Logging:
        - Topic: plc/demo/node/holding-register:1:INT
        - Topic: plc/demo/node/holding-register:2:INT
        - Topic: plc/demo/node/holding-register:3:INT
--------------------------------------------------------------------------------
/source/app-plc4x/src/main/kotlin/App.kt:
--------------------------------------------------------------------------------
import at.rocworks.gateway.core.opcua.KeyStoreLoader
import at.rocworks.gateway.core.service.Common
import at.rocworks.gateway.core.service.Component
import at.rocworks.gateway.logger.influx.InfluxDBLogger
import at.rocworks.gateway.logger.iotdb.IoTDBLogger
import at.rocworks.gateway.logger.JdbcLogger
import at.rocworks.gateway.logger.KafkaLogger
import at.rocworks.gateway.logger.neo4j.Neo4jLogger

import kotlin.Throws
import kotlin.jvm.JvmStatic

import io.vertx.core.Vertx
import io.vertx.core.json.JsonObject

import java.lang.Exception
import java.util.logging.Logger

object App {
    @Throws(Exception::class)
    @JvmStatic
    fun main(args: Array<String>) {
        val vertx = Vertx.vertx()
        val logger = Logger.getLogger(javaClass.simpleName)

        fun factory(type: Component.ComponentType, config: JsonObject): Component? {
            return Component.defaultFactory(type, config) ?: when (type) {
                Component.ComponentType.InfluxDBLogger -> InfluxDBLogger.create(config)
                Component.ComponentType.IoTDBLogger -> IoTDBLogger(config)
                Component.ComponentType.KafkaLogger -> KafkaLogger(config)
                Component.ComponentType.JdbcLogger -> JdbcLogger(config)
                Component.ComponentType.Plc4xDriver -> Plc4xDriver(config)
                Component.ComponentType.Neo4jLogger -> Neo4jLogger(config)
                else -> {
                    logger.severe("Unknown component type [${type}]")
                    null
                }
            }
        }

        KeyStoreLoader.init()
        Common.initLogging()
        Common.initGateway(args, vertx, ::factory)
    }
}
--------------------------------------------------------------------------------
/source/app-plc4x/src/main/resources/logging.properties:
--------------------------------------------------------------------------------
handlers = java.util.logging.ConsoleHandler
#handlers = java.util.logging.ConsoleHandler, java.util.logging.FileHandler

java.util.logging.ConsoleHandler.level = ALL
java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter

java.util.logging.FileHandler.level = ALL
java.util.logging.FileHandler.pattern = gateway.log.%g
java.util.logging.FileHandler.limit = 50000
java.util.logging.FileHandler.count = 1
java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter

java.util.logging.SimpleFormatter.format=[%1$tF %1$tT][%4$-7s][%3$-30.30s] %5$s %n%6$s
--------------------------------------------------------------------------------
/source/app-plc4x/src/main/resources/simplelogger.properties:
--------------------------------------------------------------------------------
org.slf4j.simpleLogger.defaultLogLevel=error
--------------------------------------------------------------------------------
/source/app/.gitignore:
--------------------------------------------------------------------------------
config.yaml
*.buf
--------------------------------------------------------------------------------
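For the plain JVM build, a minimal sketch of building and running the standard app from source (an editorial example, not a repository file); it assumes the gateway reads ./config.yaml from its working directory by default, as suggested by native/Dockerfile.run and the ignored config.yaml files, or the file named by GATEWAY_CONFIG. The /tmp paths are placeholders.

cd source
./gradlew build                                   # builds all modules and the app distribution (app/build/distributions/app.tar)
tar xf app/build/distributions/app.tar -C /tmp    # unpack the start scripts and jars
cp app/config-example.yaml /tmp/app/config.yaml   # start from the example configuration
cd /tmp/app && ./bin/app                          # launch the gateway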
/source/app/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-application-conventions' 3 | } 4 | 5 | dependencies { 6 | implementation project(':lib-core') 7 | 8 | implementation project(':lib-influxdb') 9 | implementation project(':lib-questdb') 10 | implementation project(':lib-iotdb') 11 | implementation project(':lib-neo4j') 12 | implementation project(':lib-opensearch') 13 | 14 | implementation "io.vertx:vertx-core:$vertxVersion" 15 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 16 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 17 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 18 | 19 | runtimeOnly group: 'org.postgresql', name: 'postgresql', version: '42.7.4' 20 | runtimeOnly group: 'org.hsqldb', name: 'hsqldb', version: '2.7.2' 21 | 22 | //runtimeOnly group: 'mysql', name: 'mysql-connector-java', version: '8.0.28' 23 | //runtimeOnly group: 'io.crate', name: 'crate-jdbc', version: '2.6.0' 24 | //runtimeOnly group: 'com.microsoft.sqlserver', name: 'mssql-jdbc', version: '10.2.0.jre11' 25 | } 26 | 27 | application { 28 | // Define the main class for the application. 29 | mainClass = 'App' 30 | } -------------------------------------------------------------------------------- /source/app/configs/config-demo-1.yaml: -------------------------------------------------------------------------------- 1 | # OPC UA Aggregation to Frankenstein OPC UA Server 2 | 3 | Drivers: 4 | OpcUa: 5 | - Id: "demo1" 6 | Enabled: true 7 | LogLevel: INFO 8 | EndpointUrl: "opc.tcp://192.168.1.4:62540/server" 9 | UpdateEndpointUrl: true 10 | SecurityPolicy: None 11 | 12 | - Id: "demo2" 13 | Enabled: true 14 | LogLevel: INFO 15 | WriteSchemaToFile: false 16 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 17 | UpdateEndpointUrl: true 18 | SecurityPolicy: None 19 | 20 | - Id: "s7" 21 | Enabled: true 22 | LogLevel: INFO 23 | EndpointUrl: "opc.tcp://192.168.1.99:4840" 24 | UpdateEndpointUrl: true 25 | SecurityPolicy: None 26 | 27 | Servers: 28 | OpcUa: 29 | - Port: 4841 30 | Enabled: true 31 | LogLevel: INFO 32 | Topics: 33 | - Topic: opc/demo1/path/Objects/Dynamic/# 34 | - Topic: opc/demo2/path/Objects/Demo/Simulation/# 35 | - Topic: opc/s7/path/Objects/PLC_1/DataBlocksGlobal/DB5/# 36 | -------------------------------------------------------------------------------- /source/app/configs/config-demo-2.yaml: -------------------------------------------------------------------------------- 1 | # OPC UA + MQTT Aggregation to Frankenstein OPC UA Server 2 | 3 | Drivers: 4 | OpcUa: 5 | - Id: "demo1" 6 | Enabled: true 7 | LogLevel: INFO 8 | EndpointUrl: "opc.tcp://192.168.1.4:62540/server" 9 | UpdateEndpointUrl: true 10 | SecurityPolicy: None 11 | 12 | - Id: "demo2" 13 | Enabled: true 14 | LogLevel: INFO 15 | WriteSchemaToFile: false 16 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 17 | UpdateEndpointUrl: true 18 | SecurityPolicy: None 19 | 20 | - Id: "s7" 21 | Enabled: true 22 | LogLevel: INFO 23 | EndpointUrl: "opc.tcp://192.168.1.99:4840" 24 | UpdateEndpointUrl: true 25 | SecurityPolicy: None 26 | 27 | Mqtt: 28 | - Id: "home" 29 | Enabled: true 30 | LogLevel: INFO 31 | Host: 192.168.1.4 32 | Port: 1883 33 | Format: Json 34 | CustomJson: 35 | Value: "Value" 36 | TimestampMs: "TimeMS" 37 | TimestampIso: "TimeISO" 38 | 39 | Servers: 40 | OpcUa: 41 | - Port: 4841 42 | Enabled: true 43 | LogLevel: INFO 44 | Topics: 45 | - Topic: opc/s7/path/Objects/PLC_1/DataBlocksGlobal/DB5/# 46 | - Topic: 
opc/demo1/path/Objects/Dynamic/# 47 | - Topic: opc/demo2/path/Objects/Demo/Simulation/# 48 | - Topic: mqtt/home/path/Original/Govee/# 49 | - Topic: mqtt/home/path/Original/Meter_Input/# 50 | 51 | 52 | -------------------------------------------------------------------------------- /source/app/configs/config-demo-3.yaml: -------------------------------------------------------------------------------- 1 | # OPC UA + MQTT Logging to InfluxDB and QuestDB 2 | 3 | Drivers: 4 | OpcUa: 5 | - Id: "demo1" 6 | Enabled: true 7 | LogLevel: INFO 8 | EndpointUrl: "opc.tcp://192.168.1.4:62540/server" 9 | UpdateEndpointUrl: true 10 | SecurityPolicy: None 11 | 12 | - Id: "demo2" 13 | Enabled: true 14 | LogLevel: INFO 15 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 16 | UpdateEndpointUrl: true 17 | SecurityPolicy: None 18 | SubscriptionSamplingInterval: 0.0 19 | 20 | - Id: "demo3" 21 | Enabled: true 22 | LogLevel: INFO 23 | EndpointUrl: "opc.tcp://192.168.1.4:62542" 24 | UpdateEndpointUrl: true 25 | SecurityPolicy: None 26 | SubscriptionSamplingInterval: 0.0 27 | 28 | - Id: "s7" 29 | Enabled: true 30 | LogLevel: INFO 31 | EndpointUrl: "opc.tcp://192.168.1.99:4840" 32 | UpdateEndpointUrl: true 33 | SecurityPolicy: None 34 | 35 | Mqtt: 36 | - Id: "home" 37 | Enabled: true 38 | LogLevel: INFO 39 | Host: 192.168.1.4 40 | Port: 1883 41 | Format: Json 42 | CustomJson: 43 | Value: "Value" 44 | TimestampMs: "TimeMS" 45 | TimestampIso: "TimeISO" 46 | 47 | Servers: 48 | Mqtt: 49 | - Port: 1883 50 | Host: 0.0.0.0 51 | LogLevel: INFO # ALL | INFO 52 | 53 | Loggers: 54 | InfluxDB: 55 | - Id: influxdb1 56 | Enabled: true 57 | Url: http://scada:8086 58 | Database: test 59 | Logging: 60 | - Topic: opc/s7/path/Objects/PLC_1/DataBlocksGlobal/DB5/# 61 | - Topic: opc/demo1/path/Objects/Dynamic/# 62 | - Topic: opc/demo2/path/Objects/Demo/Simulation/# 63 | - Topic: mqtt/home/path/Original/Govee/# 64 | - Topic: mqtt/home/path/Original/Meter_Input/# 65 | 66 | QuestDB: 67 | - Id: questdb1 68 | Enabled: true 69 | Config: http::addr=localhost:9000; 70 | Table: frankenstein 71 | Logging: 72 | - Topic: opc/s7/path/Objects/PLC_1/DataBlocksGlobal/DB5/# 73 | - Topic: opc/demo1/path/Objects/Dynamic/# 74 | - Topic: opc/demo2/path/Objects/Demo/Simulation/# 75 | - Topic: mqtt/home/path/Original/Govee/# 76 | - Topic: mqtt/home/path/Original/Meter_Input/# 77 | 78 | #- Topic: opc/demo2/path/Objects/Demo/SimulationMass/# 79 | #- Topic: opc/demo3/path/Objects/Demo/SimulationMass/# -------------------------------------------------------------------------------- /source/app/configs/config-demo-4.yaml: -------------------------------------------------------------------------------- 1 | # OPC UA + MQTT Logging to MQTT 2 | 3 | Drivers: 4 | OpcUa: 5 | - Id: "demo1" 6 | Enabled: true 7 | LogLevel: INFO 8 | EndpointUrl: "opc.tcp://192.168.1.4:62540/server" 9 | UpdateEndpointUrl: true 10 | SecurityPolicy: None 11 | 12 | - Id: "demo2" 13 | Enabled: true 14 | LogLevel: INFO 15 | WriteSchemaToFile: false 16 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 17 | UpdateEndpointUrl: true 18 | SecurityPolicy: None 19 | 20 | - Id: "s7" 21 | Enabled: true 22 | LogLevel: INFO 23 | EndpointUrl: "opc.tcp://192.168.1.99:4840" 24 | UpdateEndpointUrl: true 25 | SecurityPolicy: None 26 | 27 | Mqtt: 28 | - Id: "home" 29 | Enabled: true 30 | LogLevel: INFO 31 | Host: 192.168.1.4 32 | Port: 1883 33 | Format: Json 34 | CustomJson: 35 | Value: "Value" 36 | TimestampMs: "TimeMS" 37 | TimestampIso: "TimeISO" 38 | 39 | Loggers: 40 | Mqtt: 41 | - Id: mqtt1 42 | Enabled: true 43 | Host: 
linux0.rocworks.local 44 | #Format: SparkplugB 45 | Logging: 46 | - Topic: opc/s7/path/Objects/PLC_1/DataBlocksGlobal/DB5/# 47 | - Topic: opc/demo1/path/Objects/Dynamic/# 48 | - Topic: opc/demo2/path/Objects/Demo/Simulation/# 49 | - Topic: mqtt/home/path/Original/Govee/# 50 | - Topic: mqtt/home/path/Original/Meter_Input/# 51 | -------------------------------------------------------------------------------- /source/app/configs/config-demo-5.yaml: -------------------------------------------------------------------------------- 1 | # MQTT and GraphQL Server 2 | 3 | Drivers: 4 | OpcUa: 5 | - Id: "demo1" 6 | Enabled: true 7 | LogLevel: INFO 8 | EndpointUrl: "opc.tcp://192.168.1.4:62540/server" 9 | UpdateEndpointUrl: true 10 | SecurityPolicy: None 11 | 12 | - Id: "demo2" 13 | Enabled: true 14 | LogLevel: INFO 15 | WriteSchemaToFile: false 16 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 17 | UpdateEndpointUrl: true 18 | SecurityPolicy: None 19 | 20 | - Id: "s7" 21 | Enabled: true 22 | LogLevel: INFO 23 | EndpointUrl: "opc.tcp://192.168.1.99:4840" 24 | UpdateEndpointUrl: true 25 | SecurityPolicy: None 26 | 27 | Mqtt: 28 | - Id: "home" 29 | Enabled: true 30 | LogLevel: INFO 31 | Host: 192.168.1.4 32 | Port: 1883 33 | Format: Json 34 | CustomJson: 35 | Value: "Value" 36 | TimestampMs: "TimeMS" 37 | TimestampIso: "TimeISO" 38 | 39 | Servers: 40 | GraphQL: 41 | - Port: 4000 42 | LogLevel: INFO 43 | GraphiQL: true 44 | 45 | Mqtt: 46 | - Port: 1883 47 | Host: 0.0.0.0 48 | LogLevel: INFO # ALL | INFO 49 | -------------------------------------------------------------------------------- /source/app/configs/config-milo-kafka.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Port: 1883 4 | Host: 0.0.0.0 5 | LogLevel: INFO # ALL | INFO 6 | 7 | GraphQL: 8 | - Port: 4000 9 | LogLevel: INFO 10 | GraphiQL: true 11 | WriteSchemaToFile: false 12 | Schemas: 13 | - System: milo 14 | FieldName: DisplayName 15 | RootNodes: 16 | - ns=2;s=Dynamic 17 | 18 | Loggers: 19 | Kafka: 20 | - Id: kafka1 21 | Servers: nuc1:9092 22 | Enabled: true 23 | WriteParameters: 24 | QueueSize: 20000 25 | BlockSize: 10000 26 | Logging: 27 | - Topic: opc/milo/path/Objects/Dynamic/+ 28 | 29 | InfluxDB: 30 | - Id: influx1 31 | Url: http://nuc1:8086 32 | Database: test 33 | Username: "" 34 | Password: "" 35 | WriteParameters: 36 | QueueSize: 20000 37 | BlockSize: 10000 38 | Logging: 39 | - Topic: opc/milo/path/Objects/Dynamic/+ 40 | 41 | Drivers: 42 | OpcUa: 43 | - Id: "milo" 44 | Enabled: true 45 | LogLevel: INFO 46 | EndpointUrl: "opc.tcp://milo.digitalpetri.com:62541/milo" 47 | UpdateEndpointUrl: false 48 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 49 | UsernameProvider: 50 | Username: user1 51 | Password: password 52 | ConnectTimeout: 5000 53 | RequestTimeout: 5000 54 | KeepAliveFailuresAllowed: 0 55 | SubscriptionSamplingInterval: 0.0 56 | AddressCache: 57 | MaximumSize: 10000 58 | ExpireAfterSeconds: 3600 59 | WriteParameters: 60 | QueueSize: 10000 61 | BlockSize: 200 62 | WithTime: false 63 | MonitoringParameters: 64 | BufferSize: 10 65 | SamplingInterval: 0 66 | DiscardOldest: true 67 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 68 | -------------------------------------------------------------------------------- /source/app/configs/config-mqtt-opcua.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | OpcUa: 3 | - Port: 4841 4 | Enabled: true 5 | LogLevel: INFO 6 
| Topics: 7 | - Topic: mqtt/mosquitto/path/testtopic/# 8 | - Topic: mqtt/hivemq/path/testtopic/# 9 | 10 | Drivers: 11 | Mqtt: 12 | - Id: "mosquitto" 13 | LogLevel: INFO 14 | Enabled: true 15 | Host: test.mosquitto.org 16 | Port: 1883 17 | Format: Raw 18 | MaxMessageSizeKb: 65536 19 | 20 | - Id: "hivemq" 21 | LogLevel: INFO 22 | Enabled: true 23 | Host: broker.hivemq.com 24 | Port: 1883 25 | Format: Raw 26 | MaxMessageSizeKb: 65536 27 | -------------------------------------------------------------------------------- /source/app/configs/config-mqtt-spb-decoder.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | OpcUa: 3 | - Port: 4840 4 | Enabled: true 5 | LogLevel: INFO 6 | Topics: 7 | - Topic: mqtt/input/path/spBv1.0/# 8 | 9 | Drivers: 10 | Mqtt: 11 | - Id: "input" 12 | Enabled: true 13 | LogLevel: FINE 14 | Host: 192.168.1.4 15 | Port: 1883 16 | Ssl: false 17 | Format: SparkplugB 18 | 19 | Loggers: 20 | Mqtt: 21 | - Id: "output" 22 | Enabled: true 23 | Host: 192.168.1.4 24 | Port: 1883 25 | Topic: json 26 | Format: Raw 27 | LogLevel: INFO 28 | Logging: 29 | - Topic: mqtt/input/path/spBv1.0/# -------------------------------------------------------------------------------- /source/app/configs/config-mqtt-spb-influx.yaml: -------------------------------------------------------------------------------- 1 | Drivers: 2 | Mqtt: 3 | - Id: "MqttClient1" 4 | Host: xxxxxxxxxxxxxxxxxx.s1.eu.hivemq.cloud 5 | Port: 8883 6 | Ssl: true 7 | Format: SparkplugB 8 | Username: "vogler" 9 | Password: "xxxxxxxxxxxxxxxx" 10 | 11 | Loggers: 12 | InfluxDB: 13 | - Id: InfluxLogger1 14 | Enabled: true 15 | Url: http://nuc1b.rocworks.local:8086 16 | Database: test 17 | WriteParameters: 18 | QueueSize: 20000 19 | BlockSize: 10000 20 | Logging: 21 | - Topic: mqtt/MqttClient1/path/Austria/Sparkplug/# -------------------------------------------------------------------------------- /source/app/configs/config-opcua-aggregator.yaml: -------------------------------------------------------------------------------- 1 | Drivers: 2 | OpcUa: 3 | - Id: "demo1" 4 | Enabled: true 5 | LogLevel: INFO 6 | EndpointUrl: "opc.tcp://scada.rocworks.local:62541" 7 | UpdateEndpointUrl: true 8 | SecurityPolicy: None 9 | 10 | - Id: "demo2" 11 | Enabled: true 12 | LogLevel: INFO 13 | EndpointUrl: "opc.tcp://scada.rocworks.local:62542" 14 | UpdateEndpointUrl: true 15 | SecurityPolicy: None 16 | 17 | Servers: 18 | OpcUa: 19 | - Id: opc 20 | Port: 4841 21 | Topics: 22 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/# 23 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/# -------------------------------------------------------------------------------- /source/app/configs/config-opcua-mqtt.yaml: -------------------------------------------------------------------------------- 1 | Drivers: 2 | OpcUa: 3 | - Id: "demo" 4 | Enabled: true 5 | LogLevel: INFO 6 | EndpointUrl: "opc.tcp://192.168.1.3:62541" 7 | UpdateEndpointUrl: true 8 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 9 | Loggers: 10 | Mqtt: 11 | - Id: mqtt1 12 | Enabled: true 13 | Host: linux0.rocworks.local 14 | Port: 1883 15 | Ssl: false 16 | Topic: Enterprise/Site 17 | Logging: 18 | - Topic: opc/demo/path/Objects/Demo/SimulationMass/# -------------------------------------------------------------------------------- /source/app/configs/config-opcua-neo4j.yaml: -------------------------------------------------------------------------------- 1 | Drivers: 2 | OpcUa: 3 | - Id: "opcua_driver" 4 | Enabled: true 5 | 
LogLevel: INFO 6 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 7 | UpdateEndpointUrl: true 8 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 9 | Loggers: 10 | Neo4j: 11 | - Id: "neo4j_logger" 12 | Enabled: true 13 | Url: bolt://linux0.rocworks.local:7687 14 | Username: "neo4j" 15 | Password: "manager123$" 16 | Schemas: 17 | - System: opcua_driver 18 | Logging: 19 | - Topic: opc/opcua_driver/path/Objects/# -------------------------------------------------------------------------------- /source/app/configs/config-spb-opcua.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | OpcUa: 3 | - Id: opc 4 | Topics: 5 | - Topic: mqtt/spb/path/spBv1.0/# 6 | 7 | Drivers: 8 | Mqtt: 9 | - Id: spb 10 | LogLevel: FINE 11 | Host: 192.168.1.4 12 | Port: 1883 13 | Format: SparkplugB 14 | 15 | Loggers: 16 | Mqtt: 17 | - Id: test 18 | LogLevel: FINE 19 | Host: 192.168.1.4 20 | Port: 1883 21 | Format: Raw 22 | Logging: 23 | - Topic: mqtt/spb/path/spBv1.0/# 24 | -------------------------------------------------------------------------------- /source/app/configs/config-test-influx.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Port: 1883 4 | Host: 0.0.0.0 5 | LogLevel: INFO # ALL | INFO 6 | 7 | GraphQL: 8 | - Port: 4000 9 | LogLevel: INFO 10 | GraphiQL: true 11 | WriteSchemaToFile: false 12 | Schemas: 13 | - System: opc1 14 | 15 | # - System: ignition 16 | # FieldName: BrowseName # BrowseName | DisplayName 17 | # - System: unified 18 | # FieldName: DisplayName # BrowseName | DisplayName 19 | # - System: winccoa 20 | # FieldName: BrowseName # BrowseName | DisplayName 21 | 22 | Loggers: 23 | InfluxDB: 24 | - Id: influx 25 | Url: http://nuc1:8086 26 | Database: test 27 | Username: "" 28 | Password: "" 29 | WriteParameters: 30 | QueueSize: 20000 31 | BlockSize: 10000 32 | Logging: 33 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_SByte/+ 34 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Byte/+ 35 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_SByte/+ 36 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Float/+ 37 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Double/+ 38 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Integer/+ 39 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Int16/+ 40 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Int32/+ 41 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Int64/+ 42 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Unt16/+ 43 | #- Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_UInt32/+ 44 | - Topic: opc/opc1/path/Objects/Demo/Scalar/Scalar_Static/Scalar_Static_Int64 45 | 46 | Drivers: 47 | OpcUa: 48 | - Id: "opc1" 49 | Enabled: true 50 | LogLevel: INFO 51 | EndpointUrl: "opc.tcp://nuc1:20001" 52 | UpdateEndpointUrl: true 53 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 54 | ConnectTimeout: 5000 55 | RequestTimeout: 5000 56 | KeepAliveFailuresAllowed: 0 57 | SubscriptionSamplingInterval: 0.0 58 | WriteParameters: 59 | QueueSize: 10000 60 | BlockSize: 200 61 | WithTime: false 62 | MonitoringParameters: 63 | BufferSize: 10 64 | SamplingInterval: 0 65 | DiscardOldest: true 66 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 67 | 68 | - Id: "winccoa" 69 | Enabled: false 70 
| LogLevel: INFO 71 | EndpointUrl: "opc.tcp://centos1:4840" 72 | UpdateEndpointUrl: true 73 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 74 | ConnectTimeout: 5000 75 | RequestTimeout: 5000 76 | KeepAliveFailuresAllowed: 0 77 | SubscriptionSamplingInterval: 0.0 78 | WriteParameters: 79 | QueueSize: 10000 80 | BlockSize: 200 81 | WithTime: false 82 | MonitoringParameters: 83 | BufferSize: 10 84 | SamplingInterval: 0 85 | DiscardOldest: true 86 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 87 | 88 | - Id: "ignition" 89 | Enabled: false 90 | LogLevel: INFO 91 | EndpointUrl: "opc.tcp://ubuntu1:62541/discovery" 92 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 93 | ConnectTimeout: 5000 94 | RequestTimeout: 5000 95 | KeepAliveFailuresAllowed: 10 96 | SubscriptionSamplingInterval: 0 97 | UsernameProvider: 98 | Username: opcuauser 99 | Password: password 100 | WriteParameters: 101 | QueueSize: 1000 102 | BlockSize: 100 103 | WithTime: true 104 | MonitoringParameters: 105 | BufferSize: 100 106 | SamplingInterval: 0.0 107 | DiscardOldest: false 108 | DataChangeTrigger: StatusValueTimestamp # Status | StatusValue | StatusValueTimestamp 109 | 110 | - Id: "unified" 111 | Enabled: false 112 | LogLevel: INFO 113 | EndpointUrl: "opc.tcp://desktop-9o6hthf:4890" 114 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#Basic128Rsa15 115 | SubscriptionSamplingInterval: 0 116 | UsernameProvider: 117 | Username: opcuauser 118 | Password: password1 119 | WriteParameters: 120 | QueueSize: 1000 121 | BlockSize: 100 122 | WithTime: true 123 | MonitoringParameters: 124 | BufferSize: 10 125 | SamplingInterval: 0.0 126 | DiscardOldest: true 127 | -------------------------------------------------------------------------------- /source/app/configs/config-test-kafka.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Id: Mqtt 4 | Port: 1883 5 | Host: 0.0.0.0 6 | LogLevel: INFO # ALL | INFO 7 | 8 | GraphQL: 9 | - Port: 4000 10 | LogLevel: INFO 11 | GraphiQL: true 12 | WriteSchemaToFile: false 13 | Schemas: 14 | - System: opc1 15 | 16 | # - System: ignition 17 | # FieldName: BrowseName # BrowseName | DisplayName 18 | # - System: unified 19 | # FieldName: DisplayName # BrowseName | DisplayName 20 | # - System: winccoa 21 | # FieldName: BrowseName # BrowseName | DisplayName 22 | 23 | Loggers: 24 | Kafka: 25 | - Id: kafka1 26 | Enabled: true 27 | Servers: 192.168.1.131:9092 28 | TopicName: test # default is the system id of the topic 29 | #KeyName: test # default is the node path name 30 | Configs: 31 | batch.size: 25000 32 | group.id: frankenstein-producer 33 | enable.auto.commit: true 34 | auto.commit.interval.ms: 1000 35 | WriteParameters: 36 | QueueSize: 10000 37 | BlockSize: 1000 38 | 39 | Logging: 40 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_SByte/+ 41 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Byte/+ 42 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Float/+ 43 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Double/+ 44 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Integer/+ 45 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Int16/+ 46 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Int32/+ 47 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Int64/+ 48 | - Topic: 
opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Unt16/+ 49 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_UInt32/+ 50 | 51 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_SByte/+ 52 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Byte/+ 53 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Float/+ 54 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Double/+ 55 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Integer/+ 56 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Int16/+ 57 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Int32/+ 58 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Int64/+ 59 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Unt16/+ 60 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_UInt32/+ 61 | 62 | Drivers: 63 | OpcUa: 64 | - Id: "opc1" 65 | Enabled: true 66 | LogLevel: INFO 67 | EndpointUrl: "opc.tcp://192.168.1.131:20011" 68 | UpdateEndpointUrl: true 69 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 70 | ConnectTimeout: 5000 71 | RequestTimeout: 5000 72 | KeepAliveFailuresAllowed: 0 73 | SubscriptionSamplingInterval: 0.0 74 | WriteParameters: 75 | QueueSize: 10000 76 | BlockSize: 200 77 | WithTime: false 78 | MonitoringParameters: 79 | BufferSize: 10 80 | SamplingInterval: 0 81 | DiscardOldest: true 82 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 83 | 84 | - Id: "opc2" 85 | Enabled: true 86 | LogLevel: INFO 87 | EndpointUrl: "opc.tcp://192.168.1.131:20011" 88 | UpdateEndpointUrl: true 89 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 90 | ConnectTimeout: 5000 91 | RequestTimeout: 5000 92 | KeepAliveFailuresAllowed: 0 93 | SubscriptionSamplingInterval: 0.0 94 | WriteParameters: 95 | QueueSize: 10000 96 | BlockSize: 200 97 | WithTime: false 98 | MonitoringParameters: 99 | BufferSize: 10 100 | SamplingInterval: 0 101 | DiscardOldest: true 102 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 103 | 104 | - Id: "winccoa" 105 | Enabled: false 106 | LogLevel: INFO 107 | EndpointUrl: "opc.tcp://centos1:4840" 108 | UpdateEndpointUrl: true 109 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 110 | ConnectTimeout: 5000 111 | RequestTimeout: 5000 112 | KeepAliveFailuresAllowed: 0 113 | SubscriptionSamplingInterval: 0.0 114 | WriteParameters: 115 | QueueSize: 10000 116 | BlockSize: 200 117 | WithTime: false 118 | MonitoringParameters: 119 | BufferSize: 10 120 | SamplingInterval: 0 121 | DiscardOldest: true 122 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 123 | 124 | - Id: "ignition" 125 | Enabled: false 126 | LogLevel: INFO 127 | EndpointUrl: "opc.tcp://ubuntu1:62541/discovery" 128 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 129 | ConnectTimeout: 5000 130 | RequestTimeout: 5000 131 | KeepAliveFailuresAllowed: 10 132 | SubscriptionSamplingInterval: 0 133 | UsernameProvider: 134 | Username: opcuauser 135 | Password: password 136 | WriteParameters: 137 | QueueSize: 1000 138 | BlockSize: 100 139 | WithTime: true 140 | MonitoringParameters: 141 | BufferSize: 100 142 | SamplingInterval: 0.0 143 | DiscardOldest: false 144 | DataChangeTrigger: StatusValueTimestamp # Status | StatusValue | StatusValueTimestamp 145 | 146 | - Id: "unified" 147 | Enabled: false 148 | LogLevel: INFO 149 | 
EndpointUrl: "opc.tcp://desktop-9o6hthf:4890" 150 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#Basic128Rsa15 151 | SubscriptionSamplingInterval: 0 152 | UsernameProvider: 153 | Username: opcuauser 154 | Password: password1 155 | WriteParameters: 156 | QueueSize: 1000 157 | BlockSize: 100 158 | WithTime: true 159 | MonitoringParameters: 160 | BufferSize: 10 161 | SamplingInterval: 0.0 162 | DiscardOldest: true 163 | -------------------------------------------------------------------------------- /source/app/configs/config-test-loggers.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Id: "mqtt" 4 | Enabled: true 5 | 6 | Drivers: 7 | OpcUa: 8 | - Id: "demo1" 9 | Enabled: true 10 | LogLevel: INFO 11 | EndpointUrl: "opc.tcp://localhost:62541/server" 12 | UpdateEndpointUrl: true 13 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 14 | KeepAliveFailuresAllowed: 0 15 | MonitoringParameters: 16 | SamplingInterval: 10 17 | 18 | Loggers: 19 | Mqtt: 20 | - Id: mqtt1 21 | Enabled: false 22 | Host: linux0.rocworks.local 23 | Format: Raw 24 | Port: 1883 25 | Logging: 26 | #- Topic: opc/demo1/path/Objects/Dynamic/# 27 | - Topic: opc/demo1/path/Objects/Dynamic/IncreasingInt16_000 28 | 29 | Kafka: 30 | - Id: kafka1 31 | Enabled: false 32 | Servers: linux0:9092 33 | TopicName: frankenstein 34 | Format: Raw 35 | Logging: 36 | #- Topic: opc/demo1/path/Objects/Dynamic/# 37 | - Topic: opc/demo1/path/Objects/Dynamic/IncreasingInt16_000 38 | 39 | InfluxDB: 40 | - Id: influxdb1 41 | Enabled: false 42 | LogLevel: FINEST 43 | Url: http://linux0:8086 44 | Database: frankenstein 45 | WriteParameters: 46 | QueueSize: 1000000 47 | Logging: 48 | #- Topic: opc/demo1/path/Objects/Dynamic/# 49 | - Topic: opc/demo1/path/Objects/Dynamic/IncreasingInt16_000 50 | 51 | IoTDB: 52 | - Id: iotdb1 53 | Enabled: false 54 | Host: linux0 55 | Port: 6667 56 | Database: root.frankenstein 57 | Username: "root" 58 | Password: "root" 59 | WriteParameters: 60 | QueueSize: 1000000 61 | Logging: 62 | #- Topic: opc/demo1/path/Objects/Dynamic/# 63 | - Topic: opc/demo1/path/Objects/Dynamic/IncreasingInt16_000 64 | 65 | Jdbc: 66 | - Id: timescale1 67 | Enabled: false 68 | Url: jdbc:postgresql://linux0:5432/postgres 69 | Username: system 70 | Password: manager 71 | WriteParameters: 72 | QueueSize: 1000000 73 | Logging: 74 | #- Topic: opc/demo1/path/Objects/Dynamic/# 75 | - Topic: opc/demo1/path/Objects/Dynamic/IncreasingInt16_000 76 | 77 | OpenSearch: 78 | - Id: opensearch1 79 | Enabled: true 80 | Host: linux0 81 | Port: 9200 82 | Index: frankenstein 83 | WriteParameters: 84 | QueueSize: 1000000 85 | Logging: 86 | #- Topic: opc/demo1/path/Objects/Dynamic/# 87 | - Topic: opc/demo1/path/Objects/Dynamic/IncreasingInt16_000 88 | -------------------------------------------------------------------------------- /source/app/configs/config-test-mqtt.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Port: 1883 4 | Host: 0.0.0.0 5 | LogLevel: INFO # ALL | INFO 6 | 7 | GraphQL: 8 | - Port: 4000 9 | LogLevel: INFO 10 | GraphiQL: true 11 | WriteSchemaToFile: false 12 | 13 | Drivers: 14 | Mqtt: 15 | - Id: "mqtt1" 16 | Enabled: true 17 | LogLevel: INFO 18 | Host: 192.168.1.6 19 | Port: 1883 20 | Ssl: false 21 | 22 | Loggers: 23 | InfluxDB: 24 | - Id: influx1 25 | Enabled: true 26 | Url: http://nuc1.rocworks.local:8086 27 | Database: test 28 | Username: "" 29 | Password: "" 30 | WriteParameters: 
31 | QueueSize: 20000 32 | BlockSize: 10000 33 | Logging: 34 | - Topic: mqtt/mqtt1/path/Meter_Input/WattAct 35 | - Topic: mqtt/mqtt1/path/Meter_Output/WattAct 36 | - Topic: mqtt/mqtt1/path/PV/Spot/+ -------------------------------------------------------------------------------- /source/app/configs/config-test-neo4j.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Port: 1883 4 | Host: 0.0.0.0 5 | LogLevel: INFO # ALL | INFO 6 | 7 | - Id: MqttWs 8 | Port: 1884 9 | Websocket: true 10 | Host: 0.0.0.0 11 | Username: system 12 | Password: manager 13 | LogLevel: INFO # ALL | INFO 14 | 15 | GraphQL: 16 | - Port: 4000 17 | LogLevel: INFO 18 | GraphiQL: true 19 | WriteSchemaToFile: false 20 | #Schemas: 21 | # - System: opc1 22 | # FieldName: BrowseName 23 | # - System: opc2 24 | # FieldName: BrowseName 25 | # - System: opc3 26 | # FieldName: BrowseName 27 | 28 | Drivers: 29 | OpcUa: 30 | - Id: "opc1" 31 | Enabled: true 32 | LogLevel: INFO 33 | EndpointUrl: "opc.tcp://192.168.1.3:62541/server" 34 | UpdateEndpointUrl: true 35 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 36 | MonitoringParameters: 37 | BufferSize: 10 38 | SamplingInterval: 0 39 | DiscardOldest: true 40 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 41 | 42 | - Id: "opc2" 43 | Enabled: true 44 | LogLevel: INFO 45 | EndpointUrl: "opc.tcp://192.168.1.3:62541/server" 46 | UpdateEndpointUrl: true 47 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 48 | MonitoringParameters: 49 | BufferSize: 10 50 | SamplingInterval: 0 51 | DiscardOldest: true 52 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 53 | 54 | - Id: "opc3" 55 | Enabled: true 56 | LogLevel: INFO 57 | EndpointUrl: "opc.tcp://192.168.1.3:62541/server" 58 | UpdateEndpointUrl: true 59 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 60 | MonitoringParameters: 61 | BufferSize: 10 62 | SamplingInterval: 0 63 | DiscardOldest: true 64 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 65 | 66 | - Id: "winccoa1" 67 | Enabled: true 68 | LogLevel: INFO 69 | EndpointUrl: "opc.tcp://centos1.rocworks.local:4840" 70 | UpdateEndpointUrl: true 71 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 72 | ConnectTimeout: 5000 73 | RequestTimeout: 5000 74 | KeepAliveFailuresAllowed: 0 75 | SubscriptionSamplingInterval: 0.0 76 | WriteParameters: 77 | QueueSize: 10000 78 | BlockSize: 200 79 | WithTime: false 80 | MonitoringParameters: 81 | BufferSize: 10 82 | SamplingInterval: 0 83 | DiscardOldest: true 84 | DataChangeTrigger: StatusValue # Status | StatusValue | StatusValueTimestamp 85 | 86 | - Id: "ignition1" 87 | Enabled: false 88 | LogLevel: INFO 89 | EndpointUrl: "opc.tcp://ubuntu1:62541/discovery" 90 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 91 | ConnectTimeout: 5000 92 | RequestTimeout: 5000 93 | KeepAliveFailuresAllowed: 10 94 | SubscriptionSamplingInterval: 0 95 | UsernameProvider: 96 | Username: opcuauser 97 | Password: password 98 | WriteParameters: 99 | QueueSize: 1000 100 | BlockSize: 100 101 | WithTime: true 102 | MonitoringParameters: 103 | BufferSize: 100 104 | SamplingInterval: 0.0 105 | DiscardOldest: false 106 | DataChangeTrigger: StatusValueTimestamp # Status | StatusValue | StatusValueTimestamp 107 | 108 | - Id: "unified1" 109 | Enabled: false 110 | LogLevel: INFO 111 | EndpointUrl: "opc.tcp://desktop-9o6hthf:4890" 112 | 
SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#Basic128Rsa15 113 | SubscriptionSamplingInterval: 0 114 | UsernameProvider: 115 | Username: opcuauser 116 | Password: password1 117 | WriteParameters: 118 | QueueSize: 1000 119 | BlockSize: 100 120 | WithTime: true 121 | MonitoringParameters: 122 | BufferSize: 10 123 | SamplingInterval: 0.0 124 | DiscardOldest: true 125 | 126 | - Id: "panel1" 127 | Enabled: false 128 | LogLevel: INFO 129 | EndpointUrl: "opc.tcp://192.168.1.136:4890" 130 | UpdateEndpointUrl: true 131 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#Basic128Rsa15 132 | SubscriptionSamplingInterval: 100 133 | UsernameProvider: 134 | Username: username1 135 | Password: password1 136 | WriteParameters: 137 | QueueSize: 1000 138 | BlockSize: 100 139 | WithTime: true 140 | MonitoringParameters: 141 | BufferSize: 10 142 | SamplingInterval: 1.0 143 | DiscardOldest: true 144 | 145 | Loggers: 146 | Neo4j: 147 | - Id: neo4j1 148 | Enabled: true 149 | Url: bolt://nuc1.rocworks.local:7687 150 | Username: "neo4j" 151 | Password: "manager" 152 | Schemas: 153 | - System: opc1 154 | RootNodes: 155 | - "ns=2;s=Demo" 156 | - System: opc2 157 | RootNodes: 158 | - "ns=2;s=Demo" 159 | - System: opc3 160 | RootNodes: 161 | - "ns=2;s=Demo" 162 | - System: winccoa1 163 | WriteParameters: 164 | BlockSize: 1000 165 | Logging: 166 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Float/+ 167 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Double/+ 168 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_Int16/+ 169 | - Topic: opc/opc1/path/Objects/Demo/SimulationMass/SimulationMass_String/+ 170 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Float/+ 171 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Double/+ 172 | - Topic: opc/opc2/path/Objects/Demo/SimulationMass/SimulationMass_Int16/+ 173 | - Topic: opc/opc3/path/Objects/Demo/SimulationMass/SimulationMass_Float/+ 174 | - Topic: opc/opc3/path/Objects/Demo/SimulationMass/SimulationMass_Double/+ 175 | - Topic: opc/opc3/path/Objects/Demo/SimulationMass/SimulationMass_Int16/+ 176 | - Topic: opc/winccoa1/path/Objects/PUMP1/# 177 | - Topic: opc/winccoa1/path/Objects/ExampleDP_Int/# 178 | 179 | -------------------------------------------------------------------------------- /source/app/configs/config-test-opensearch.yaml: -------------------------------------------------------------------------------- 1 | Servers: 2 | Mqtt: 3 | - Port: 1883 4 | Host: 0.0.0.0 5 | LogLevel: INFO # ALL | INFO 6 | 7 | Drivers: 8 | Mqtt: 9 | - Id: "home" 10 | Enabled: true 11 | LogLevel: INFO 12 | Host: scada 13 | Port: 1883 14 | Format: Json 15 | CustomJson: 16 | Value: "Value" 17 | TimestampMs: "TimeMS" 18 | TimestampIso: "TimeISO" 19 | 20 | OpcUa: 21 | - Id: "demo1" 22 | Enabled: true 23 | LogLevel: INFO 24 | EndpointUrl: "opc.tcp://192.168.1.4:62540/server" 25 | UpdateEndpointUrl: true 26 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 27 | KeepAliveFailuresAllowed: 0 28 | SubscriptionSamplingInterval: 0.0 29 | 30 | - Id: "demo2" 31 | Enabled: true 32 | LogLevel: INFO 33 | EndpointUrl: "opc.tcp://192.168.1.4:62541" 34 | UpdateEndpointUrl: true 35 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 36 | KeepAliveFailuresAllowed: 0 37 | SubscriptionSamplingInterval: 0.0 38 | 39 | Loggers: 40 | OpenSearch: 41 | - Id: "search1" 42 | Enabled: true 43 | LogLevel: INFO 44 | Host: linux0 45 | Port: 9200 46 | Index: gateway 47 | 
Logging: 48 | - Topic: mqtt/home/path/Original/# 49 | - Topic: opc/demo1/path/Objects/Variables/# 50 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/# 51 | 52 | -------------------------------------------------------------------------------- /source/app/configs/config-test-questdb.yaml: -------------------------------------------------------------------------------- 1 | Drivers: 2 | Mqtt: 3 | - Id: "home" 4 | Enabled: true 5 | LogLevel: INFO 6 | Host: scada.rocworks.local 7 | Port: 1883 8 | Format: Json 9 | CustomJson: 10 | Value: "Value" 11 | TimestampMs: "TimeMS" 12 | TimestampIso: "TimeISO" 13 | 14 | OpcUa: 15 | - Id: "demo1" 16 | Enabled: true 17 | LogLevel: INFO 18 | EndpointUrl: "opc.tcp://scada.rocworks.local:62541" 19 | UpdateEndpointUrl: true 20 | SecurityPolicy: None 21 | 22 | - Id: "demo2" 23 | Enabled: true 24 | LogLevel: INFO 25 | EndpointUrl: "opc.tcp://scada.rocworks.local:62542" 26 | UpdateEndpointUrl: true 27 | SecurityPolicy: None 28 | 29 | Loggers: 30 | QuestDB: 31 | - Id: Qdb0 32 | Enabled: true 33 | Config: http::addr=nuc1.rocworks.local:9001; 34 | Table: home1 35 | Logging: 36 | - Topic: mqtt/home/path/Original/# 37 | 38 | - Id: Qdb1 39 | Enabled: true 40 | Config: http::addr=nuc1.rocworks.local:9001; 41 | Table: demo1 42 | Logging: 43 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Boolean/# 44 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Byte/# 45 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Double/# 46 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 47 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Int16/# 48 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Int32/# 49 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Int64/# 50 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_SByte/# 51 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_UInt16/# 52 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_UInt32/# 53 | 54 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Duration/# 55 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_DateTime/# 56 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_String/# 57 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_ByteString/# 58 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_UtcTime/# 59 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_Guid/# 60 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_LocaleId/# 61 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_LocalizedText/# 62 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_NodeId/# 63 | - Topic: opc/demo1/path/Objects/Demo/SimulationMass/SimulationMass_XmlElement/# 64 | 65 | - Id: Qdb2 66 | Enabled: true 67 | Config: http::addr=nuc1.rocworks.local:9001; 68 | Table: demo2 69 | Logging: 70 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Boolean/# 71 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Byte/# 72 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Double/# 73 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Float/# 74 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Int16/# 75 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Int32/# 76 | - Topic: 
opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Int64/# 77 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_SByte/# 78 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_UInt16/# 79 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_UInt32/# 80 | 81 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Duration/# 82 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_DateTime/# 83 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_String/# 84 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_ByteString/# 85 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_UtcTime/# 86 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_Guid/# 87 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_LocaleId/# 88 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_LocalizedText/# 89 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_NodeId/# 90 | - Topic: opc/demo2/path/Objects/Demo/SimulationMass/SimulationMass_XmlElement/# 91 | -------------------------------------------------------------------------------- /source/app/configs/config-test-zenoh.yaml: -------------------------------------------------------------------------------- 1 | Drivers: 2 | OpcUa: 3 | - Id: "oa" 4 | Enabled: true 5 | LogLevel: INFO 6 | EndpointUrl: "opc.tcp://localhost:4840" 7 | UpdateEndpointUrl: true 8 | SecurityPolicyUri: http://opcfoundation.org/UA/SecurityPolicy#None 9 | SubscriptionSamplingInterval: 0 10 | 11 | Loggers: 12 | Zenoh: 13 | - Id: Zenoh1 14 | Enabled: true 15 | Format: JsonSimple 16 | Key: WinccOA1 17 | Logging: 18 | - Topic: opc/oa/path/Objects/Home-Automation-Float/# -------------------------------------------------------------------------------- /source/app/src/main/kotlin/App.kt: -------------------------------------------------------------------------------- 1 | import at.rocworks.gateway.core.opcua.KeyStoreLoader 2 | import at.rocworks.gateway.core.service.Common 3 | import at.rocworks.gateway.core.service.Component 4 | import at.rocworks.gateway.logger.ImplyLogger 5 | import at.rocworks.gateway.logger.influx.InfluxDBLogger 6 | import at.rocworks.gateway.logger.questdb.QuestDBLogger 7 | import at.rocworks.gateway.logger.JdbcLogger 8 | import at.rocworks.gateway.logger.KafkaLogger 9 | import at.rocworks.gateway.logger.iotdb.IoTDBLogger 10 | import at.rocworks.gateway.logger.neo4j.Neo4jLogger 11 | import at.rocworks.gateway.logger.opensearch.OpenSearchLogger 12 | 13 | import io.vertx.core.Vertx 14 | import io.vertx.core.VertxOptions 15 | import io.vertx.core.json.JsonObject 16 | import java.util.concurrent.TimeUnit 17 | import java.util.logging.Logger 18 | 19 | object App { 20 | @Throws(Exception::class) 21 | @JvmStatic 22 | fun main(args: Array<String>) { 23 | val options = VertxOptions() 24 | options.warningExceptionTime = 2 25 | options.warningExceptionTimeUnit = TimeUnit.SECONDS 26 | val vertx = Vertx.vertx(options) 27 | val logger = Logger.getLogger(javaClass.simpleName) 28 | 29 | fun factory(type: Component.ComponentType, config: JsonObject): Component? 
{ 30 | return Component.defaultFactory(type, config) ?: when (type) { 31 | Component.ComponentType.InfluxDBLogger -> InfluxDBLogger.create(config) 32 | Component.ComponentType.QuestDBLogger -> QuestDBLogger(config) 33 | Component.ComponentType.IoTDBLogger -> IoTDBLogger(config) 34 | Component.ComponentType.KafkaLogger -> KafkaLogger(config) 35 | Component.ComponentType.JdbcLogger -> JdbcLogger(config) 36 | Component.ComponentType.Neo4jLogger -> Neo4jLogger(config) 37 | Component.ComponentType.OpenSearchLogger -> OpenSearchLogger(config) 38 | Component.ComponentType.ImplyLogger -> ImplyLogger(config) 39 | else -> { 40 | logger.severe("Unknown component type [${type}]") 41 | null 42 | } 43 | } 44 | } 45 | 46 | KeyStoreLoader.init() 47 | Common.initLogging() 48 | Common.initGateway(args, vertx, ::factory) 49 | } 50 | } -------------------------------------------------------------------------------- /source/app/src/main/resources/logging.properties: -------------------------------------------------------------------------------- 1 | handlers = java.util.logging.ConsoleHandler 2 | #handlers = java.util.logging.ConsoleHandler, java.util.logging.FileHandler 3 | 4 | java.util.logging.ConsoleHandler.level = ALL 5 | java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter 6 | 7 | java.util.logging.FileHandler.level = ALL 8 | java.util.logging.FileHandler.pattern = gateway.log.%g 9 | java.util.logging.FileHandler.limit = 50000 10 | java.util.logging.FileHandler.count = 1 11 | java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter 12 | 13 | java.util.logging.SimpleFormatter.format=[%1$tF %1$tT][%4$-7s][%3$-30.30s] %5$s %n%6$s 14 | -------------------------------------------------------------------------------- /source/app/src/main/resources/simplelogger.properties: -------------------------------------------------------------------------------- 1 | org.slf4j.simpleLogger.defaultLogLevel=error 2 | -------------------------------------------------------------------------------- /source/build.gradle: -------------------------------------------------------------------------------- 1 | ext { 2 | vertxVersion = "4.5.10" 3 | } -------------------------------------------------------------------------------- /source/buildSrc/build.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 3 | */ 4 | 5 | plugins { 6 | // Support convention plugins written in Groovy. Convention plugins are build scripts in 'src/main' that automatically become available as plugins in the main build. 7 | id 'groovy-gradle-plugin' 8 | } 9 | 10 | repositories { 11 | // Use the plugin portal to apply community plugins in convention plugins. 12 | gradlePluginPortal() 13 | } 14 | 15 | dependencies { 16 | implementation 'org.jetbrains.kotlin:kotlin-gradle-plugin:1.9.20' 17 | } 18 | -------------------------------------------------------------------------------- /source/buildSrc/src/main/groovy/gateway.kotlin-application-conventions.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 3 | */ 4 | 5 | plugins { 6 | // Apply the common convention plugin for shared build configuration between library and application projects. 7 | id 'gateway.kotlin-common-conventions' 8 | 9 | // Apply the application plugin to add support for building a CLI application in Java. 
10 | id 'application' 11 | } 12 | -------------------------------------------------------------------------------- /source/buildSrc/src/main/groovy/gateway.kotlin-common-conventions.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 3 | */ 4 | 5 | plugins { 6 | // Apply the org.jetbrains.kotlin.jvm Plugin to add support for Kotlin. 7 | id 'org.jetbrains.kotlin.jvm' 8 | } 9 | 10 | dependencies { 11 | constraints { 12 | // Define dependency versions as constraints 13 | implementation 'org.apache.commons:commons-text:1.9' 14 | 15 | implementation 'org.jetbrains.kotlin:kotlin-stdlib-jdk8' 16 | } 17 | 18 | // Align versions of all Kotlin components 19 | implementation platform('org.jetbrains.kotlin:kotlin-bom') 20 | 21 | // Use the Kotlin JDK 8 standard library. 22 | implementation 'org.jetbrains.kotlin:kotlin-stdlib-jdk8' 23 | 24 | // Align versions of all Kotlin components 25 | implementation platform('org.jetbrains.kotlin:kotlin-bom') 26 | 27 | // Use JUnit Jupiter API for testing. 28 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.6.2' 29 | 30 | // Use JUnit Jupiter Engine for testing. 31 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine' 32 | } 33 | 34 | tasks.named('test') { 35 | // Use junit platform for unit tests. 36 | useJUnitPlatform() 37 | } 38 | -------------------------------------------------------------------------------- /source/buildSrc/src/main/groovy/gateway.kotlin-library-conventions.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 3 | */ 4 | 5 | plugins { 6 | // Apply the common convention plugin for shared build configuration between library and application projects. 7 | id 'gateway.kotlin-common-conventions' 8 | 9 | // Apply the java-library plugin for API and implementation separation. 10 | id 'java-library' 11 | } 12 | -------------------------------------------------------------------------------- /source/gradle-docker.sh: -------------------------------------------------------------------------------- 1 | docker run --rm -v "$PWD":/home/gradle/project -w /home/gradle/project gradle:8.4-jdk17 gradle $1 2 | -------------------------------------------------------------------------------- /source/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vogler75/automation-gateway/1f8b7fa2129cdf28651d9c9a540999b9cfd34096/source/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /source/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /source/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | @rem This is normally unused 30 | set APP_BASE_NAME=%~n0 31 | set APP_HOME=%DIRNAME% 32 | 33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 35 | 36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 38 | 39 | @rem Find java.exe 40 | if defined JAVA_HOME goto findJavaFromJavaHome 41 | 42 | set JAVA_EXE=java.exe 43 | %JAVA_EXE% -version >NUL 2>&1 44 | if %ERRORLEVEL% equ 0 goto execute 45 | 46 | echo. 47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 48 | echo. 49 | echo Please set the JAVA_HOME variable in your environment to match the 50 | echo location of your Java installation. 51 | 52 | goto fail 53 | 54 | :findJavaFromJavaHome 55 | set JAVA_HOME=%JAVA_HOME:"=% 56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 57 | 58 | if exist "%JAVA_EXE%" goto execute 59 | 60 | echo. 61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 62 | echo. 63 | echo Please set the JAVA_HOME variable in your environment to match the 64 | echo location of your Java installation. 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 
84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /source/lib-core/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-library-conventions' 3 | } 4 | 5 | dependencies { 6 | implementation 'org.eclipse.milo:sdk-client:0.6.14' 7 | implementation 'org.eclipse.milo:sdk-server:0.6.14' 8 | implementation 'org.eclipse.milo:dictionary-manager:0.6.14' 9 | implementation 'org.eclipse.milo:dictionary-reader:0.6.14' 10 | 11 | implementation "io.vertx:vertx-core:$vertxVersion" 12 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 13 | implementation "io.vertx:vertx-mqtt:$vertxVersion" 14 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 15 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 16 | implementation "io.vertx:vertx-web-client:$vertxVersion" 17 | implementation "io.vertx:vertx-web-graphql:$vertxVersion" 18 | implementation "io.vertx:vertx-kafka-client:$vertxVersion" 19 | 20 | implementation "io.reactivex.rxjava2:rxjava:2.2.21" 21 | implementation "org.eclipse.tahu:tahu-core:1.0.8" // Note: 1.0.10 leads to tons of log messages 22 | 23 | implementation 'org.slf4j:slf4j-api:1.7.32' 24 | implementation 'org.slf4j:slf4j-simple:1.7.32' 25 | } 26 | -------------------------------------------------------------------------------- /source/lib-core/src/main/java/at/rocworks/gateway/core/data/CodecDataPoint.java: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data; 2 | 3 | import io.vertx.core.buffer.Buffer; 4 | import io.vertx.core.eventbus.MessageCodec; 5 | import io.vertx.core.json.Json; 6 | import io.vertx.core.json.JsonObject; 7 | 8 | public class CodecDataPoint implements MessageCodec<DataPoint, DataPoint> { 9 | 10 | 11 | @Override 12 | public void encodeToWire(Buffer buffer, DataPoint value) { 13 | Buffer data = value.encodeToJson().toBuffer(); 14 | buffer.appendInt(data.length()); 15 | buffer.appendBuffer(data); 16 | } 17 | 18 | @Override 19 | public DataPoint decodeFromWire(int i, Buffer buffer) { 20 | int len = buffer.getInt(i); 21 | JsonObject json = (JsonObject) Json.decodeValue(buffer.getBuffer(i+4, i+4+len)); 22 | return DataPoint.Companion.fromJsonObject(json); } 23 | 24 | @Override 25 | public DataPoint transform(DataPoint value) { 26 | return value; 27 | } 28 | 29 | @Override 30 | public String name() { 31 | return this.getClass().getSimpleName(); 32 | } 33 | 34 | @Override 35 | public byte systemCodecID() { 36 | return -1; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /source/lib-core/src/main/java/at/rocworks/gateway/core/data/CodecTopic.java: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data; 2 | 3 | import io.vertx.core.buffer.Buffer; 4 | import io.vertx.core.eventbus.MessageCodec; 5 | import io.vertx.core.json.Json; 6 | import io.vertx.core.json.JsonObject; 7 | 8 | public class CodecTopic implements MessageCodec<Topic, Topic> { 9 | 10 | @Override 11 | public void encodeToWire(Buffer buffer, Topic mqttTopic) { 12 | Buffer data = mqttTopic.encodeToJson().toBuffer(); 13 | buffer.appendInt(data.length()); 14 | buffer.appendBuffer(data); 15 | } 16 | 
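// Wire format shared by CodecDataPoint, CodecTopic and CodecTopicValue: a 4-byte
// length prefix written by encodeToWire(), followed by the JSON encoding of the
// object. decodeFromWire() reads the length at the given position and then decodes
// the JSON payload that starts right after the prefix.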
17 | @Override 18 | public Topic decodeFromWire(int i, Buffer buffer) { 19 | int len = buffer.getInt(i); 20 | JsonObject json = (JsonObject)Json.decodeValue(buffer.getBuffer(i+4, i+4+len)); 21 | return Topic.Companion.decodeFromJson(json); 22 | } 23 | 24 | @Override 25 | public Topic transform(Topic mqttTopic) { 26 | return mqttTopic; 27 | } 28 | 29 | @Override 30 | public String name() { 31 | return this.getClass().getSimpleName(); 32 | } 33 | 34 | @Override 35 | public byte systemCodecID() { 36 | return -1; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /source/lib-core/src/main/java/at/rocworks/gateway/core/data/CodecTopicValue.java: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data; 2 | 3 | import io.vertx.core.buffer.Buffer; 4 | import io.vertx.core.eventbus.MessageCodec; 5 | import io.vertx.core.json.Json; 6 | import io.vertx.core.json.JsonObject; 7 | 8 | public class CodecTopicValue implements MessageCodec<TopicValue, TopicValue> { 9 | 10 | @Override 11 | public void encodeToWire(Buffer buffer, TopicValue value) { 12 | Buffer data = value.encodeToJson().toBuffer(); 13 | buffer.appendInt(data.length()); 14 | buffer.appendBuffer(data); 15 | } 16 | 17 | @Override 18 | public TopicValue decodeFromWire(int i, Buffer buffer) { 19 | int len = buffer.getInt(i); 20 | JsonObject json = (JsonObject) Json.decodeValue(buffer.getBuffer(i+4, i+4+len)); 21 | return TopicValue.Companion.decodeFromJson(json); 22 | } 23 | 24 | @Override 25 | public TopicValue transform(TopicValue value) { 26 | return value; 27 | } 28 | 29 | @Override 30 | public String name() { 31 | return this.getClass().getSimpleName(); 32 | } 33 | 34 | @Override 35 | public byte systemCodecID() { 36 | return -1; 37 | } 38 | } 39 | 40 | -------------------------------------------------------------------------------- /source/lib-core/src/main/java/at/rocworks/gateway/core/data/GenericCodec.java: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data; 2 | import java.io.ByteArrayInputStream; 3 | import java.io.ByteArrayOutputStream; 4 | import java.io.IOException; 5 | import java.io.ObjectInputStream; 6 | import java.io.ObjectOutput; 7 | import java.io.ObjectOutputStream; 8 | import io.vertx.core.buffer.Buffer; 9 | import io.vertx.core.eventbus.MessageCodec; 10 | 11 | public class GenericCodec<T> implements MessageCodec<T, T> { 12 | private final Class<T> genericClass; 13 | 14 | public GenericCodec(Class<T> genericClass) { 15 | super(); 16 | this.genericClass = genericClass; 17 | } 18 | 19 | @Override 20 | public void encodeToWire(Buffer buffer, T object) { 21 | ByteArrayOutputStream byteOutput = new ByteArrayOutputStream(); 22 | ObjectOutput objectOutput = null; 23 | try { 24 | objectOutput = new ObjectOutputStream(byteOutput); 25 | objectOutput.writeObject(object); 26 | objectOutput.flush(); 27 | byte[] objectBytes = byteOutput.toByteArray(); 28 | buffer.appendInt(objectBytes.length); 29 | buffer.appendBytes(objectBytes); 30 | objectOutput.close(); 31 | } catch (IOException e) { 32 | e.printStackTrace(); 33 | } finally { 34 | try { 35 | byteOutput.close(); 36 | } catch (IOException e) { 37 | e.printStackTrace(); 38 | } 39 | } 40 | } 41 | 42 | @Override 43 | public T decodeFromWire(int pos, Buffer buffer) { 44 | // Length of the serialized object 45 | int length = buffer.getInt(pos); 46 | 47 | // Jump 4 because getInt() == 4 bytes 48 | byte[] bytes = buffer.getBytes(pos += 4, pos + length); 
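// The payload below is deserialized with standard Java object serialization, so any
// class handled by GenericCodec has to implement java.io.Serializable; on failure the
// error is printed and null is returned instead of rethrowing.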
49 | ByteArrayInputStream byteInput = new ByteArrayInputStream(bytes); 50 | try { 51 | ObjectInputStream objectInput = new ObjectInputStream(byteInput); 52 | @SuppressWarnings("unchecked") 53 | T object = (T) objectInput.readObject(); 54 | objectInput.close(); 55 | return object; 56 | } catch (IOException | ClassNotFoundException e) { 57 | System.out.println("Decode failed "+e.getMessage()); 58 | return null; 59 | } finally { 60 | try { 61 | byteInput.close(); 62 | } catch (IOException e) { 63 | e.printStackTrace(); 64 | } 65 | } 66 | } 67 | 68 | @Override 69 | public T transform(T object) { 70 | return object; 71 | } 72 | 73 | @Override 74 | public String name() { 75 | return "Codec"+genericClass.getSimpleName(); 76 | } 77 | 78 | @Override 79 | public byte systemCodecID() { return -1; } 80 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/java/at/rocworks/gateway/core/opcua/HostnameUtil.java: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.opcua; 2 | 3 | import java.net.Inet4Address; 4 | import java.net.InetAddress; 5 | import java.net.NetworkInterface; 6 | import java.net.SocketException; 7 | import java.net.UnknownHostException; 8 | import java.util.Collections; 9 | import java.util.Enumeration; 10 | import java.util.Set; 11 | import java.util.logging.Logger; 12 | 13 | import static com.google.common.collect.Sets.newHashSet; 14 | 15 | public class HostnameUtil { 16 | 17 | /** 18 | * @return the local hostname, if possible. Failure results in "localhost". 19 | */ 20 | public static String getHostname() { 21 | try { 22 | return InetAddress.getLocalHost().getHostName(); 23 | } catch (UnknownHostException e) { 24 | return "localhost"; 25 | } 26 | } 27 | 28 | /** 29 | * Given an address resolve it to as many unique addresses or hostnames as can be found. 30 | * 31 | * @param address the address to resolve. 32 | * @return the addresses and hostnames that were resolved from {@code address}. 33 | */ 34 | public static Set<String> getHostnames(String address) { 35 | return getHostnames(address, true); 36 | } 37 | 38 | /** 39 | * Given an address resolve it to as many unique addresses or hostnames as can be found. 40 | * 41 | * @param address the address to resolve. 42 | * @param includeLoopback if {@code true} loopback addresses will be included in the returned set. 43 | * @return the addresses and hostnames that were resolved from {@code address}. 
44 | */ 45 | public static Set<String> getHostnames(String address, boolean includeLoopback) { 46 | Set<String> hostnames = newHashSet(); 47 | 48 | try { 49 | InetAddress inetAddress = InetAddress.getByName(address); 50 | 51 | if (inetAddress.isAnyLocalAddress()) { 52 | try { 53 | Enumeration<NetworkInterface> nis = NetworkInterface.getNetworkInterfaces(); 54 | 55 | for (NetworkInterface ni : Collections.list(nis)) { 56 | Collections.list(ni.getInetAddresses()).forEach(ia -> { 57 | if (ia instanceof Inet4Address) { 58 | if (includeLoopback || !ia.isLoopbackAddress()) { 59 | hostnames.add(ia.getHostName()); 60 | hostnames.add(ia.getHostAddress()); 61 | hostnames.add(ia.getCanonicalHostName()); 62 | } 63 | } 64 | }); 65 | } 66 | } catch (SocketException e) { 67 | Logger.getLogger(HostnameUtil.class.getSimpleName()) 68 | .warning(String.format("Failed to get NetworkInterfaces for bind address: %s %s", address, e.getMessage())); 69 | } 70 | } else { 71 | if (includeLoopback || !inetAddress.isLoopbackAddress()) { 72 | hostnames.add(inetAddress.getHostName()); 73 | hostnames.add(inetAddress.getHostAddress()); 74 | hostnames.add(inetAddress.getCanonicalHostName()); 75 | } 76 | } 77 | } catch (UnknownHostException e) { 78 | Logger.getLogger(HostnameUtil.class.getSimpleName()) 79 | .warning(String.format("Failed to get InetAddress for bind address: %s %s", address, e.getMessage())); 80 | } 81 | 82 | return hostnames; 83 | } 84 | 85 | } 86 | -------------------------------------------------------------------------------- /source/lib-core/src/main/java/at/rocworks/gateway/core/opcua/KeyStoreLoader.java: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.opcua; 2 | 3 | import java.io.InputStream; 4 | import java.io.OutputStream; 5 | import java.nio.file.Files; 6 | import java.nio.file.Path; 7 | import java.nio.file.Paths; 8 | import java.security.*; 9 | import java.security.cert.X509Certificate; 10 | import java.util.logging.Logger; 11 | import java.util.regex.Pattern; 12 | import java.util.Arrays; 13 | 14 | import org.eclipse.milo.opcua.stack.core.util.SelfSignedCertificateBuilder; 15 | import org.eclipse.milo.opcua.stack.core.util.SelfSignedCertificateGenerator; 16 | 17 | 18 | public class KeyStoreLoader { 19 | public static KeyStoreLoader keyStoreLoader; 20 | 21 | public static final String APPLICATION_NAME = "Automation Gateway@" + HostnameUtil.getHostname(); 22 | public static final String APPLICATION_URI = String.format("urn:ROCWORKS.Gateway"); 23 | 24 | private static final Pattern IP_ADDR_PATTERN = Pattern.compile( 25 | "^(([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\.){3}([01]?\\d\\d?|2[0-4]\\d|25[0-5])$"); 26 | 27 | private static final String CLIENT_ALIAS = "client-ai"; 28 | private static final char[] PASSWORD = "password".toCharArray(); 29 | 30 | private final Logger logger = Logger.getLogger(getClass().getSimpleName()); 31 | 32 | private X509Certificate clientCertificate; 33 | private X509Certificate[] clientCertificateChain; 34 | 35 | private KeyPair clientKeyPair; 36 | 37 | public static void init() throws Exception { 38 | Path securityDir = getPkiDir(); 39 | Files.createDirectories(securityDir); 40 | if (!Files.exists(securityDir)) { 41 | throw new Exception("Unable to create security dir: " + securityDir); 42 | } 43 | keyStoreLoader = new KeyStoreLoader().load(securityDir); 44 | } 45 | 46 | public static Path getPkiDir() { 47 | String dirName = System.getenv("GATEWAY_SECURITY_DIRECTORY"); 48 | if (dirName == null) dirName = "."; 49 | Path securityDir = 
Paths.get(dirName, "security"); 50 | return securityDir; 51 | } 52 | 53 | KeyStoreLoader load(Path baseDir) throws Exception { 54 | KeyStore keyStore = KeyStore.getInstance("PKCS12"); 55 | 56 | Path serverKeyStore = baseDir.resolve("rocworks-gateway.pfx"); 57 | 58 | logger.info("Loading Client KeyStore at " + serverKeyStore); 59 | 60 | if (!Files.exists(serverKeyStore)) { 61 | logger.info("Create new certificate..."); 62 | keyStore.load(null, PASSWORD); 63 | 64 | KeyPair keyPair = SelfSignedCertificateGenerator.generateRsaKeyPair(2048); 65 | 66 | SelfSignedCertificateBuilder builder = new SelfSignedCertificateBuilder(keyPair) 67 | .setCommonName(APPLICATION_NAME) 68 | .setApplicationUri(APPLICATION_URI) 69 | .setOrganization("ROCWORKS") 70 | .setOrganizationalUnit("R&D") 71 | .setLocalityName("Mattersburg") 72 | .setCountryCode("AT") 73 | .addDnsName("localhost") 74 | .addIpAddress("127.0.0.1"); 75 | 76 | // Get as many hostnames and IP addresses as we can listed in the certificate. 77 | for (String hostname : HostnameUtil.getHostnames("0.0.0.0")) { 78 | if (IP_ADDR_PATTERN.matcher(hostname).matches()) { 79 | logger.info("Ip: "+hostname); 80 | builder.addIpAddress(hostname); 81 | } else { 82 | logger.info("DNS: "+hostname); 83 | builder.addDnsName(hostname); 84 | } 85 | } 86 | 87 | X509Certificate certificate = builder.build(); 88 | 89 | keyStore.setKeyEntry(CLIENT_ALIAS, keyPair.getPrivate(), PASSWORD, new X509Certificate[]{certificate}); 90 | try (OutputStream out = Files.newOutputStream(serverKeyStore)) { 91 | keyStore.store(out, PASSWORD); 92 | } 93 | } else { 94 | logger.info("Load existing certificate..."); 95 | try (InputStream in = Files.newInputStream(serverKeyStore)) { 96 | keyStore.load(in, PASSWORD); 97 | } 98 | } 99 | 100 | Key serverPrivateKey = keyStore.getKey(CLIENT_ALIAS, PASSWORD); 101 | if (serverPrivateKey instanceof PrivateKey) { 102 | clientCertificate = (X509Certificate) keyStore.getCertificate(CLIENT_ALIAS); 103 | 104 | clientCertificateChain = Arrays.stream(keyStore.getCertificateChain(CLIENT_ALIAS)) 105 | .map(X509Certificate.class::cast) 106 | .toArray(X509Certificate[]::new); 107 | 108 | PublicKey serverPublicKey = clientCertificate.getPublicKey(); 109 | clientKeyPair = new KeyPair(serverPublicKey, (PrivateKey) serverPrivateKey); 110 | } 111 | 112 | logger.info("Loaded certificate."); 113 | return this; 114 | } 115 | 116 | public X509Certificate getClientCertificate() { 117 | return clientCertificate; 118 | } 119 | 120 | public X509Certificate[] getClientCertificateChain() { 121 | return clientCertificateChain; 122 | } 123 | 124 | public KeyPair getClientKeyPair() { 125 | return clientKeyPair; 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/data/DataPoint.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data 2 | 3 | import io.vertx.core.json.JsonObject 4 | import java.io.Serializable 5 | 6 | data class DataPoint ( 7 | val topic: Topic, 8 | val value: TopicValue 9 | ) : Serializable { 10 | fun encodeToJson(): JsonObject = JsonObject() 11 | .put("topic", topic.encodeToJson()) 12 | .put("value", value.encodeToJson()) 13 | 14 | companion object { 15 | fun fromJsonObject(json: JsonObject): DataPoint = DataPoint( 16 | Topic.decodeFromJson(json.getJsonObject("topic")), 17 | TopicValue.decodeFromJson(json.getJsonObject("value")) 18 | ) 19 | } 20 | } 21 | 
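A quick way to see how the codec above is meant to be used: a DataPoint is a Topic plus a TopicValue, and both sides round-trip through a Vert.x JsonObject. The sketch below is illustrative only; the topic string handed to Topic.parseTopic is a made-up example (the real parse syntax is defined in Topic.kt), and TopicValue/TopicStatus are the classes shown further down in this library.

import at.rocworks.gateway.core.data.DataPoint
import at.rocworks.gateway.core.data.Topic
import at.rocworks.gateway.core.data.TopicStatus
import at.rocworks.gateway.core.data.TopicValue

fun main() {
    // Value with the defaults from TopicValue (source/server time default to Instant.now()).
    val value = TopicValue(value = 42.0, statusCode = TopicStatus.GOOD)

    // Illustrative topic string only; see Topic.parseTopic for the actual syntax.
    val topic = Topic.parseTopic("opc/plc1/node/ns=2;s=Demo.Value")

    val point = DataPoint(topic, value)

    // JSON round trip, as used when data points are sent over the event bus.
    val json = point.encodeToJson()
    val copy = DataPoint.fromJsonObject(json)

    println(copy.value.valueAsDouble())  // 42.0
    println(copy.value.isStatusGood())   // true
}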
-------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/data/EventBus.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data 2 | 3 | import io.vertx.core.Vertx 4 | import io.vertx.core.buffer.Buffer 5 | import io.vertx.core.eventbus.Message 6 | import io.vertx.core.eventbus.MessageConsumer 7 | import io.vertx.core.json.JsonArray 8 | import io.vertx.core.json.JsonObject 9 | import java.util.logging.Logger 10 | 11 | class EventBus(val logger: Logger) { 12 | fun requestPublishTopicBuffer( 13 | vertx: Vertx, 14 | topic: Topic, 15 | data: Buffer 16 | ) { 17 | val target = topic.copy(dataFormat=Topic.Format.Value) 18 | val address = "${target.systemType.name}/${target.systemName}/Publish" 19 | 20 | val request = JsonObject() 21 | request.put("Topic", target.encodeToJson()) 22 | request.put("Buffer", data) 23 | 24 | vertx.eventBus().request(address, request) { 25 | logger.finest { "Publish response [${it.succeeded()}] [${it.result()?.body()}]" } 26 | } 27 | } 28 | 29 | fun requestPublishTopicValue( 30 | vertx: Vertx, 31 | topic: Topic, 32 | value: TopicValue 33 | ) { 34 | requestPublishDataPoint(vertx, DataPoint(topic, value)) 35 | } 36 | 37 | fun requestPublishDataPoint( 38 | vertx: Vertx, 39 | data: DataPoint 40 | ) { 41 | val address = "${data.topic.systemType.name}/${data.topic.systemName}/Publish" 42 | 43 | val request = JsonObject() 44 | request.put("Topic", data.topic.encodeToJson()) 45 | request.put("Value", data.value.encodeToJson()) 46 | 47 | vertx.eventBus().request(address, request) { 48 | logger.finest { "Publish response [${it.succeeded()}] [${it.result()?.body()}]" } 49 | } 50 | } 51 | 52 | fun requestSubscribeTopic( 53 | vertx: Vertx, 54 | clientId: String, 55 | topic: Topic, 56 | onComplete: (Boolean, MessageConsumer)->Unit, 57 | onMessage: (Topic, Message)->Unit 58 | ) 59 | { 60 | val consumer = vertx.eventBus().consumer(topic.topicName) { 61 | onMessage(topic, it) 62 | } 63 | val address = "${topic.systemType}/${topic.systemName}/Subscribe" 64 | val request = JsonObject() 65 | .put("ClientId", clientId) 66 | .put("Topic", topic.encodeToJson()) 67 | vertx.eventBus().request(address, request) { 68 | logger.fine { "Subscribe response [${it.succeeded()}] [${it.result()?.body()}]" } 69 | if (it.succeeded() && it.result().body().getBoolean("Ok")) { 70 | onComplete(true, consumer) 71 | } else { 72 | consumer.unregister() 73 | onComplete(false, consumer) 74 | } 75 | } 76 | } 77 | 78 | fun requestUnsubscribeTopic( 79 | vertx: Vertx, 80 | clientId: String, 81 | topic: Topic, 82 | onComplete: (Boolean)->Unit 83 | ) { 84 | val request = JsonObject().put("ClientId", clientId) 85 | request.put("Topic", topic.encodeToJson()) 86 | val address = "${topic.systemType}/${topic.systemName}" 87 | logger.fine { "Unsubscribe from [${address}] [$topic]" } 88 | vertx.eventBus().request("${address}/Unsubscribe", request) { 89 | logger.fine { "Unsubscribe response [${it.succeeded()}] [${it.result()?.body()}]" } 90 | val ok = it.succeeded() && it.result().body().getBoolean("Ok") 91 | onComplete(ok) 92 | } 93 | } 94 | 95 | fun requestUnsubscribeTopics( 96 | vertx: Vertx, 97 | clientId: String, 98 | topics: List, 99 | onComplete: (Boolean, List)->Unit 100 | ) { 101 | topics.groupBy { "${it.systemType}/${it.systemName}" }.forEach { group -> 102 | val request = JsonObject().put("ClientId", clientId) 103 | request.put("Topics", JsonArray(group.value.map { 
it.encodeToJson() })) 104 | val address = "${group.key}/Unsubscribe" 105 | vertx.eventBus().request(address, request) { 106 | logger.fine { "Unsubscribe response [${it.succeeded()}] [${it.result()?.body()}]" } 107 | val ok = it.succeeded() && it.result().body().getBoolean("Ok") 108 | onComplete(ok, group.value) 109 | } 110 | } 111 | } 112 | 113 | fun publishDataPoint( 114 | vertx: Vertx, 115 | dataPoint: DataPoint 116 | ) { 117 | vertx.eventBus().publish(dataPoint.topic.topicName, dataPoint) 118 | } 119 | 120 | fun publishBufferValue( 121 | vertx: Vertx, 122 | topic: String, 123 | value: Buffer 124 | ) { 125 | vertx.eventBus().publish(topic, value) 126 | } 127 | 128 | fun publishJsonValue( 129 | vertx: Vertx, 130 | topic: String, 131 | value: JsonObject 132 | ) { 133 | vertx.eventBus().publish(topic, value) 134 | } 135 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/data/TopicStatus.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data 2 | 3 | class TopicStatus { 4 | companion object { 5 | const val GOOD = "Good" 6 | const val BAD = "Bad" 7 | } 8 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/data/TopicValue.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.data 2 | 3 | import io.vertx.core.buffer.Buffer 4 | import io.vertx.core.json.JsonObject 5 | import java.io.Serializable 6 | import java.time.Instant 7 | 8 | data class TopicValue ( 9 | val value: Any?, 10 | val statusCode: String = "", 11 | val sourceTime: Instant = Instant.now(), 12 | val serverTime: Instant = Instant.now() 13 | //val sourcePicoseconds: Int = 0, 14 | //val serverPicoseconds: Int = 0 15 | ) : Serializable { 16 | // default constructor needed for json to object mapping 17 | fun hasValue() = value!=null 18 | fun hasNoValue() = value==null 19 | 20 | fun valueAsObject() = value 21 | fun statusAsString() = statusCode 22 | fun valueAsString() = value?.toString() ?: "" 23 | fun valueAsDouble(): Double? 
= when (value) { 24 | is Number -> value.toDouble() 25 | else -> valueAsString().toDoubleOrNull() 26 | } 27 | fun stringValue(): String = if (value is String || value is Buffer) value.toString() else "" 28 | 29 | fun isStatusGood() = statusCode == TopicStatus.GOOD 30 | 31 | fun serverTime() = serverTime 32 | fun sourceTime() = sourceTime 33 | fun sourceTimeMs(): Long = sourceTime.toEpochMilli() 34 | fun serverTimeMs(): Long = serverTime.toEpochMilli() 35 | fun serverTimeAsISO(): String = serverTime.toString() 36 | fun sourceTimeAsISO(): String = sourceTime.toString() 37 | 38 | fun dataTypeName(): String = valueAsObject()?.javaClass?.simpleName ?: "" 39 | 40 | override fun toString(): String = encodeToJson().toString() 41 | 42 | fun encodeToJson(): JsonObject { 43 | //JsonObject.mapFrom(this) 44 | return JsonObject() 45 | .put("value", value) 46 | .put("dataType", dataTypeName()) 47 | .put("statusCode", statusCode) 48 | .put("sourceTime", sourceTimeAsISO()) 49 | .put("serverTime", serverTimeAsISO()) 50 | .put("sourceTimeMs", sourceTimeMs()) 51 | .put("serverTimeMs", serverTimeMs()) 52 | //.put("sourcePicoseconds", sourcePicoseconds) 53 | //.put("serverPicoseconds", serverPicoseconds) 54 | } 55 | 56 | companion object { 57 | fun decodeFromJson(json: JsonObject): TopicValue { 58 | //json.mapTo(TopicValue::class.java) 59 | fun getTime(name: String) : Instant { 60 | val ms = json.getLong("${name}Ms") 61 | return if (ms != null) Instant.ofEpochMilli(ms) 62 | else { 63 | val iso = json.getString(name) 64 | if (iso == null) Instant.now() 65 | else Instant.parse(iso) 66 | } 67 | } 68 | 69 | return TopicValue( 70 | json.getValue("value", null), 71 | json.getString("statusCode", ""), 72 | getTime("sourceTime"), 73 | getTime("serverTime") 74 | //json.getInteger("sourcePicoseconds", 0), 75 | //json.getInteger("serverPicoseconds", 0) 76 | ) 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/driver/MonitoredItem.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.driver 2 | 3 | abstract class MonitoredItem { 4 | abstract val item: Any 5 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/driver/Registry.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.driver 2 | 3 | import at.rocworks.gateway.core.data.Topic 4 | import java.util.ArrayList 5 | import java.util.HashMap 6 | 7 | class Registry { 8 | private val topics = HashSet() 9 | private val topicClients = HashMap>() 10 | private val topicMonitoredItems = HashMap>() 11 | 12 | fun addClient(clientId: String, topic: Topic) : Pair { 13 | val clients = topicClients.getOrDefault(topic.topicName, HashSet()) 14 | if (clients.size == 0) topicClients[topic.topicName] = clients 15 | val added = clients.add(clientId) 16 | return Pair(clients.size, added) 17 | } 18 | 19 | fun delClient(clientId: String, topic: Topic) : Pair { 20 | val clients = topicClients.getOrDefault(topic.topicName, HashSet()) 21 | val removed = clients.remove(clientId) 22 | if (clients.size==0) topicClients.remove(topic.topicName) 23 | return Pair(clients.size, removed) 24 | } 25 | 26 | fun addMonitoredItem(item: MonitoredItem, topic: Topic) { 27 | topics.add(topic) 28 | val items = topicMonitoredItems.getOrDefault(topic.topicName, ArrayList()) 29 | if 
(items.size == 0) topicMonitoredItems[topic.topicName] = items 30 | items.add(item) 31 | } 32 | 33 | fun delMonitoredItems(topic: Topic) { 34 | topicMonitoredItems.remove(topic.topicName) 35 | } 36 | 37 | fun getTopics(): List { 38 | return topics.toList() 39 | } 40 | 41 | fun delTopic(topic: Topic) : List { 42 | topics.remove(topic) 43 | val result = topicMonitoredItems.remove(topic.topicName) 44 | return result ?: ArrayList() 45 | } 46 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/logger/ILoggerQueue.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.logger 2 | 3 | import at.rocworks.gateway.core.data.DataPoint 4 | 5 | interface ILoggerQueue { 6 | fun isQueueFull() : Boolean 7 | fun getSize(): Int 8 | fun getCapacity(): Int 9 | 10 | fun add(dp: DataPoint) 11 | 12 | fun pollBlock(handler: (DataPoint)->Unit): Int 13 | 14 | fun pollCommit() 15 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/logger/LoggerQueueDisk.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.logger 2 | 3 | import at.rocworks.gateway.core.data.DataPoint 4 | import java.io.* 5 | import java.nio.MappedByteBuffer 6 | import java.nio.channels.FileChannel 7 | import java.util.concurrent.Semaphore 8 | import java.util.concurrent.TimeUnit 9 | import java.util.concurrent.locks.ReentrantLock 10 | import java.util.logging.Logger 11 | 12 | class LoggerQueueDisk( 13 | private val id: String, 14 | private val logger: Logger, 15 | private val queueSize: Int, 16 | private val blockSize: Int, 17 | private val pollTimeout: Long, 18 | private val diskPath: String 19 | ) : ILoggerQueue { 20 | private var writeValueQueueFull = false 21 | 22 | private val outputBlock = arrayListOf() 23 | private val semaphore = Semaphore(0) // Start with 0 permits (blocking initially) 24 | 25 | private val fileSize = queueSize.toLong() // one datapoint is about 1408 bytes 26 | private val fileName = "$diskPath/${id}.buf" 27 | private val file: RandomAccessFile = RandomAccessFile(fileName, "rw") 28 | private val buffer: MappedByteBuffer 29 | private val lock = ReentrantLock() 30 | 31 | private val startPosition = Int.SIZE_BYTES * 2 32 | private var writePosition = startPosition 33 | private var readPosition = startPosition 34 | 35 | init { 36 | if (file.length() != queueSize.toLong()) { 37 | file.setLength(fileSize) 38 | buffer = file.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, fileSize) 39 | writeReadPosition() 40 | writeWritePosition() 41 | } else { 42 | buffer = file.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, fileSize) 43 | buffer.position(0) 44 | readPosition = buffer.int 45 | writePosition = buffer.int 46 | logger.info("Initial position read $readPosition write $writePosition") 47 | } 48 | } 49 | 50 | private fun enqueue(dp: DataPoint): Boolean { 51 | lock.lock() 52 | //println("> ${dp.value.valueAsString()}") 53 | try { 54 | val dataBytes = serialize(dp) 55 | val dataSize = dataBytes.size + Int.SIZE_BYTES 56 | logger.finest { "Enqueue: ReadPos $readPosition WritePos $writePosition Data size: ${dataBytes.size} " } 57 | 58 | if (writePosition + dataSize > file.length()) { 59 | buffer.position(writePosition) 60 | buffer.putInt(0) // mark EOF 61 | writePosition = startPosition 62 | } 63 | if (writePosition < 
readPosition && writePosition + dataSize >= readPosition) { 64 | return false 65 | } else { 66 | buffer.position(writePosition) 67 | buffer.putInt(dataBytes.size) 68 | buffer.put(dataBytes) 69 | writePosition += dataSize 70 | writeWritePosition() 71 | return true 72 | } 73 | } finally { 74 | lock.unlock() 75 | } 76 | } 77 | 78 | private fun writeReadPosition() { 79 | buffer.position(0) 80 | buffer.putInt(readPosition) 81 | } 82 | 83 | private fun writeWritePosition() { 84 | buffer.position(Int.SIZE_BYTES) 85 | buffer.putInt(writePosition) 86 | } 87 | 88 | private fun dequeue(): DataPoint? { 89 | lock.lock() 90 | try { 91 | if (readPosition == writePosition) { 92 | return null // Queue is empty 93 | } 94 | logger.finest { "Dequeue: ReadPos $readPosition WritePos $writePosition" } 95 | 96 | buffer.position(readPosition) 97 | var dataSize = buffer.int 98 | if (dataSize == 0) { // EOF 99 | readPosition = startPosition 100 | if (writePosition == startPosition) { 101 | return null // Queue is empty 102 | } else { 103 | buffer.position(startPosition) 104 | dataSize = buffer.int 105 | } 106 | } 107 | val dataBytes = ByteArray(dataSize) 108 | buffer.get(dataBytes) 109 | val dp = deserialize(dataBytes) 110 | 111 | readPosition += dataSize + Int.SIZE_BYTES 112 | 113 | //println("< ${dp.value.valueAsString()}") 114 | 115 | return dp 116 | } finally { 117 | lock.unlock() 118 | } 119 | } 120 | 121 | fun close() { 122 | buffer.force() 123 | file.close() 124 | } 125 | 126 | override fun isQueueFull(): Boolean { 127 | return writeValueQueueFull; 128 | } 129 | 130 | override fun getCapacity(): Int { 131 | return queueSize 132 | } 133 | 134 | override fun getSize(): Int { 135 | val size = writePosition - readPosition 136 | return if (size >= 0) size 137 | else queueSize + size 138 | } 139 | 140 | override fun add(dp: DataPoint) { 141 | if (enqueue(dp)) { 142 | if (writeValueQueueFull) { 143 | writeValueQueueFull = false 144 | logger.warning("Logger write queue not full anymore. [${getSize()}]") 145 | } 146 | } else { 147 | if (!writeValueQueueFull) { 148 | writeValueQueueFull = true 149 | logger.warning("Logger write queue is full! [${getSize()}]") 150 | } 151 | } 152 | semaphore.release() 153 | } 154 | 155 | override fun pollBlock(handler: (DataPoint) -> Unit): Int { 156 | if (outputBlock.isNotEmpty()) { 157 | logger.warning("Repeat last data block in one second...") 158 | Thread.sleep(1000) 159 | logger.warning("Repeat last data block.") 160 | outputBlock.forEach(handler) 161 | return outputBlock.size 162 | } else { 163 | var point: DataPoint? 
= dequeue() 164 | if (point == null) { 165 | if (semaphore.tryAcquire(pollTimeout, TimeUnit.MILLISECONDS)) { 166 | point = dequeue() 167 | } 168 | } 169 | while (point != null) { 170 | if (point.value.sourceTime.epochSecond > 0) { 171 | outputBlock.add(point) 172 | handler(point) 173 | } 174 | point = if (outputBlock.size < blockSize) dequeue() else null 175 | } 176 | return outputBlock.size 177 | } 178 | } 179 | 180 | override fun pollCommit() { 181 | lock.lock() 182 | try { 183 | outputBlock.clear() 184 | writeReadPosition() 185 | } finally { 186 | lock.unlock() 187 | } 188 | } 189 | 190 | private fun serialize(dp: DataPoint) : ByteArray { 191 | ByteArrayOutputStream().use { baos -> 192 | ObjectOutputStream(baos).use { oos -> 193 | oos.writeObject(dp) 194 | return baos.toByteArray() 195 | } 196 | } 197 | } 198 | 199 | private fun deserialize(serializedData: ByteArray) : DataPoint { 200 | ByteArrayInputStream(serializedData).use { bais -> 201 | ObjectInputStream(bais).use { ois -> 202 | return ois.readObject() as DataPoint 203 | } 204 | } 205 | } 206 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/logger/LoggerQueueMemory.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.logger 2 | 3 | import at.rocworks.gateway.core.data.DataPoint 4 | import java.util.concurrent.ArrayBlockingQueue 5 | import java.util.concurrent.TimeUnit 6 | import java.util.logging.Logger 7 | 8 | class LoggerQueueMemory( 9 | private val logger: Logger, 10 | private val writeParameterQueueSize: Int, 11 | private val writeParameterBlockSize: Int, 12 | private val writeQueuePollTimeout: Long 13 | ) : ILoggerQueue { 14 | private val writeValueQueue = ArrayBlockingQueue(writeParameterQueueSize) 15 | private var writeValueQueueFull = false 16 | 17 | private val outputBlock = arrayListOf() 18 | 19 | override fun isQueueFull(): Boolean = writeValueQueueFull 20 | override fun getCapacity(): Int = writeParameterQueueSize 21 | override fun getSize(): Int = writeValueQueue.size 22 | 23 | override fun add(dp: DataPoint) { 24 | try { 25 | writeValueQueue.add(dp) 26 | if (writeValueQueueFull) { 27 | writeValueQueueFull = false 28 | logger.warning("Logger write queue not full anymore. [${getSize()}]") 29 | } 30 | } catch (e: IllegalStateException) { 31 | if (!writeValueQueueFull) { 32 | writeValueQueueFull = true 33 | logger.warning("Logger write queue is full! [${getSize()}]") 34 | } 35 | } 36 | } 37 | 38 | private fun pollWait(): DataPoint? = writeValueQueue.poll(writeQueuePollTimeout, TimeUnit.MILLISECONDS) 39 | 40 | private fun pollNoWait(): DataPoint? = writeValueQueue.poll() 41 | 42 | override fun pollBlock(handler: (DataPoint)->Unit): Int { 43 | if (outputBlock.isNotEmpty()) { 44 | logger.warning("Repeat last data block in one second...") 45 | Thread.sleep(1000) 46 | logger.warning("Repeat last data block.") 47 | outputBlock.forEach(handler) 48 | return outputBlock.size 49 | } else { 50 | var point: DataPoint? 
= pollWait() 51 | while (point != null) { 52 | if (point.value.sourceTime.epochSecond > 0) { 53 | outputBlock.add(point) 54 | handler(point) 55 | } 56 | point = if (outputBlock.size < writeParameterBlockSize) pollNoWait() else null 57 | } 58 | return outputBlock.size 59 | } 60 | } 61 | 62 | override fun pollCommit() { 63 | outputBlock.clear() 64 | } 65 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/mqtt/MqttMonitoredItem.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.mqtt 2 | 3 | import at.rocworks.gateway.core.driver.MonitoredItem 4 | 5 | class MqttMonitoredItem(override val item: String) : MonitoredItem() { 6 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/mqtt/MqttServer.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.mqtt 2 | 3 | import at.rocworks.gateway.core.data.DataPoint 4 | import at.rocworks.gateway.core.data.Topic 5 | import at.rocworks.gateway.core.data.Topic.Format 6 | import at.rocworks.gateway.core.graphql.ConfigServer 7 | import at.rocworks.gateway.core.service.Common 8 | import at.rocworks.gateway.core.service.Component 9 | import at.rocworks.gateway.core.service.ComponentLogger 10 | import io.netty.handler.codec.mqtt.MqttQoS 11 | import io.vertx.core.* 12 | import io.vertx.core.buffer.Buffer 13 | import io.vertx.core.eventbus.MessageConsumer 14 | import io.vertx.core.json.JsonArray 15 | import io.vertx.core.json.JsonObject 16 | import io.vertx.mqtt.MqttEndpoint 17 | import io.vertx.mqtt.MqttServer 18 | import io.vertx.mqtt.MqttServerOptions 19 | import io.vertx.mqtt.MqttTopicSubscription 20 | import io.vertx.mqtt.messages.MqttPublishMessage 21 | import io.vertx.mqtt.messages.MqttSubscribeMessage 22 | import io.vertx.mqtt.messages.MqttUnsubscribeMessage 23 | 24 | import java.util.logging.Level 25 | import java.util.logging.Logger 26 | 27 | class MqttServer(config: JsonObject) : Component(config) { 28 | private val id = config.getString("Id", "") 29 | private val logger = ComponentLogger.getLogger(this::class.java.simpleName, id) 30 | 31 | private val host = config.getString("Host", "0.0.0.0") 32 | private val port = config.getInteger("Port", 1883) 33 | private val ws = config.getBoolean("Websocket", false) 34 | private val username = config.getString("Username", "") 35 | private val password = config.getString("Password", "") 36 | private val maxMessageSize = config.getInteger("MaxMessageSizeKb", 8) * 1024 37 | 38 | private val options = MqttServerOptions() 39 | .setPort(port) 40 | .setHost(host) 41 | .setMaxMessageSize(maxMessageSize) 42 | .setUseWebSocket(ws) 43 | 44 | private lateinit var server : io.vertx.mqtt.MqttServer 45 | 46 | init { 47 | logger.level = Level.parse(config.getString("LogLevel", "INFO")) 48 | } 49 | 50 | override fun start(startPromise: Promise) { 51 | super.start() 52 | server = MqttServer.create(vertx, options) 53 | server.endpointHandler { 54 | try { 55 | val authUsername = it.auth()?.username ?: "" 56 | val authPassword = it.auth()?.password ?: "" 57 | if ((username == "" || username == authUsername) && 58 | (password == "" || password == authPassword)) 59 | vertx.deployVerticle(MqttServerEndpoint(logger, it)) 60 | else 61 | logger.warning("Unauthorized access! 
[${authUsername}] [${authPassword}]") 62 | } catch (e: Exception) { 63 | e.printStackTrace() 64 | } 65 | } 66 | 67 | server.listen { result -> 68 | if (result.succeeded()) { 69 | logger.info("MQTT server started and listening on port " + server.actualPort() + " "+(if (ws) "Websocket" else "")+" MaxMessageSize: "+maxMessageSize) 70 | startPromise.complete() 71 | } else { 72 | logger.severe("MQTT server error on start" + result.cause().message) 73 | startPromise.fail(result.cause()) 74 | } 75 | } 76 | } 77 | 78 | override fun stop(stopPromise: Promise) { 79 | super.stop() 80 | server.close() 81 | stopPromise.complete() 82 | } 83 | 84 | override fun getComponentGroup() = ComponentGroup.Server 85 | 86 | override fun getComponentId(): String { 87 | return id 88 | } 89 | 90 | override fun getComponentConfig(): JsonObject { 91 | return this.config 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/opcua/OpcUaServer.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.opcua 2 | 3 | import at.rocworks.gateway.core.data.DataPoint 4 | import at.rocworks.gateway.core.data.EventBus 5 | import at.rocworks.gateway.core.data.Topic 6 | import at.rocworks.gateway.core.service.Component 7 | import at.rocworks.gateway.core.service.ComponentLogger 8 | import at.rocworks.gateway.core.opcua.server.OpcUaServerInstance 9 | import at.rocworks.gateway.core.service.ServiceHandler 10 | 11 | import io.vertx.core.Promise 12 | import io.vertx.core.eventbus.Message 13 | import io.vertx.core.eventbus.MessageConsumer 14 | import io.vertx.core.json.JsonObject 15 | import io.vertx.servicediscovery.Status 16 | import java.util.concurrent.ArrayBlockingQueue 17 | import java.util.concurrent.TimeUnit 18 | import java.util.concurrent.atomic.AtomicBoolean 19 | import java.util.logging.Level 20 | import kotlin.concurrent.thread 21 | 22 | class OpcUaServer(config: JsonObject): Component(config) { 23 | private val id: String = config.getString("Id", "") 24 | private val logger = ComponentLogger.getLogger(this::class.java.simpleName, id) 25 | private val eventBus = EventBus(logger) 26 | 27 | private val topics : List 28 | private val services : List> 29 | 30 | val bindPort: Int = config.getInteger("Port", 4840) 31 | val bindPath: String = config.getString("Path", "server") 32 | 33 | val manufacturerName = "Rocworks" 34 | val productName = "Automation Gateway OPC UA Server" 35 | val productUri = "https://github.com/vogler75/automation-gateway" 36 | val softwareVersion = "1" 37 | val buildNumber = "1" 38 | val buildDate= "" 39 | 40 | val bindAddresses: List 41 | var endpointAddresses: List = listOf() 42 | val securityPolicies = listOf("None","Basic128Rsa15","Basic256","Basic256Sha256","Aes128_Sha256_RsaOaep","Aes256_Sha256_RsaPss") 43 | 44 | private var serverInstance : OpcUaServerInstance? = null 45 | 46 | private val messageConsumers = mutableListOf>() 47 | 48 | private val writeValueQueueSize = 100 49 | private var writeValueQueueFull = false 50 | private val writeValueQueue = ArrayBlockingQueue(100) 51 | private var writeValueThread : Thread? = null 52 | private val writeValueStop = AtomicBoolean(false) 53 | 54 | fun writeValue(data: DataPoint) { 55 | try { 56 | writeValueQueue.add(data) 57 | if (writeValueQueueFull) { 58 | writeValueQueueFull = false 59 | logger.warning("Write queue not full anymore. 
[${writeValueQueue.size}]") 60 | } 61 | } catch (e: IllegalStateException) { 62 | if (!writeValueQueueFull) { 63 | writeValueQueueFull = true 64 | logger.warning("Write queue is full! [${writeValueQueueSize}]") 65 | } 66 | } 67 | } 68 | 69 | private fun writerThread() = thread(start = true) { 70 | logger.info("Writer thread with queue size [${writeValueQueue.remainingCapacity()}]") 71 | writeValueStop.set(false) 72 | while (!writeValueStop.get()) { 73 | writeValueQueue.poll(10, TimeUnit.MILLISECONDS)?.let { data -> 74 | eventBus.requestPublishDataPoint(vertx, data) 75 | } 76 | } 77 | } 78 | 79 | init { 80 | topics = config 81 | .getJsonArray("Topics") 82 | ?.asSequence() 83 | ?.filterIsInstance() 84 | ?.mapNotNull { it.getString("Topic") } 85 | ?.map { Topic.parseTopic(it) } 86 | ?.filter { it.dataFormat == Topic.Format.Json } 87 | ?.toList() 88 | ?:listOf() 89 | 90 | bindAddresses = config.getJsonArray("BindAddresses")?.filterIsInstance() 91 | ?: listOf("0.0.0.0") 92 | 93 | services = topics.map { Pair(it.systemType, it.systemName) }.distinct() 94 | 95 | logger.level = Level.parse(config.getString("LogLevel", "INFO")) 96 | logger.info("BindAddresses: ${bindAddresses.joinToString(", ")}") 97 | } 98 | 99 | override fun getComponentGroup(): ComponentGroup { 100 | return ComponentGroup.Server 101 | } 102 | 103 | override fun getComponentId(): String { 104 | return id 105 | } 106 | 107 | override fun getComponentConfig(): JsonObject { 108 | return config 109 | } 110 | 111 | override fun start(startPromise: Promise) { 112 | super.start() 113 | thread { 114 | // we must do this in a thread because getHostnames takes some time, and we don't want to block the verticle 115 | endpointAddresses = config.getJsonArray("EndpointAddresses")?.filterIsInstance() 116 | ?: HostnameUtil.getHostnames(HostnameUtil.getHostname()).toList() 117 | logger.info("EndpointAddresses: ${endpointAddresses.joinToString(", ")}") 118 | 119 | // start the writer thread 120 | writeValueThread = writerThread() 121 | 122 | // start the server instance 123 | serverInstance = OpcUaServerInstance(this).also { 124 | it.startup() 125 | subscribeTopics() 126 | startPromise.complete() 127 | } 128 | } 129 | } 130 | 131 | override fun stop(stopPromise: Promise) { 132 | super.stop() 133 | unsubscribeTopics() 134 | writeValueStop.set(true) 135 | serverInstance?.shutdown() 136 | stopPromise.complete() 137 | } 138 | 139 | private fun subscribeTopics() { // TODO: same in LoggerBase.kt 140 | val handler = ServiceHandler(vertx, logger) 141 | fun onComplete(ok: Boolean, consumer: MessageConsumer) { 142 | if (ok) messageConsumers.add(consumer) 143 | } 144 | fun onMessage(@Suppress("UNUSED_PARAMETER") topic: Topic, message: Message) { 145 | valueConsumerDataPoint(message.body()) 146 | } 147 | services.forEach { it -> 148 | handler.observeService(it.first.name, it.second) { service -> 149 | logger.info("Service [${service.name}] changed status [${service.status}]") 150 | if (service.status == Status.UP) { 151 | topics 152 | .filter { it.systemType.name == service.type && it.systemName == service.name } 153 | .forEach { topic -> eventBus.requestSubscribeTopic(vertx, this.id, topic, ::onComplete,::onMessage) } 154 | } 155 | } 156 | } 157 | } 158 | 159 | private fun unsubscribeTopics() { 160 | messageConsumers.forEach { it.unregister() } 161 | eventBus.requestUnsubscribeTopics(vertx, this.id, topics) { _, _ -> } 162 | } 163 | 164 | private fun valueConsumerDataPoint(data: DataPoint) { 165 | try { 166 | val topic = data.topic 167 | val value = 
data.value 168 | if (value.hasNoValue()) return 169 | logger.finest { "Got value $topic $value" } 170 | serverInstance?.gatewayNodes?.setDataPoint(data) 171 | } catch (e: Exception) { 172 | e.printStackTrace() 173 | } 174 | } 175 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/opcua/driver/OpcUaConfig.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.opcua.driver 2 | 3 | import org.eclipse.milo.opcua.sdk.client.api.identity.AnonymousProvider 4 | import org.eclipse.milo.opcua.stack.core.security.SecurityPolicy 5 | import org.eclipse.milo.opcua.stack.core.types.structured.EndpointDescription 6 | import org.eclipse.milo.opcua.sdk.client.api.identity.IdentityProvider 7 | import org.eclipse.milo.opcua.sdk.client.api.identity.UsernameProvider 8 | import java.util.function.Predicate 9 | 10 | data class OpcUaConfig( 11 | val endpointUrl: String, 12 | val securityPolicy: SecurityPolicy?, 13 | val identityProvider: IdentityProvider = AnonymousProvider() 14 | ) { 15 | fun endpointFilter(): Predicate { 16 | return Predicate { e: EndpointDescription -> 17 | (securityPolicy == null) || (e.securityPolicyUri == securityPolicy.uri) 18 | } 19 | } 20 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/opcua/driver/OpcUaMonitoredItem.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.opcua.driver 2 | 3 | import at.rocworks.gateway.core.driver.MonitoredItem 4 | import org.eclipse.milo.opcua.sdk.client.api.subscriptions.UaMonitoredItem 5 | 6 | class OpcUaMonitoredItem(override val item: UaMonitoredItem) : MonitoredItem() { 7 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/opcua/server/OpcUaNamespace.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.opcua.server 2 | 3 | import org.eclipse.milo.opcua.sdk.core.AccessLevel 4 | import org.eclipse.milo.opcua.sdk.core.Reference 5 | import org.eclipse.milo.opcua.sdk.server.Lifecycle 6 | import org.eclipse.milo.opcua.sdk.server.LifecycleManager 7 | import org.eclipse.milo.opcua.sdk.server.OpcUaServer as MiloOpcUaServer 8 | import org.eclipse.milo.opcua.sdk.server.api.AddressSpaceComposite 9 | import org.eclipse.milo.opcua.sdk.server.api.Namespace 10 | import org.eclipse.milo.opcua.sdk.server.nodes.UaFolderNode 11 | import org.eclipse.milo.opcua.sdk.server.nodes.UaNode 12 | import org.eclipse.milo.opcua.sdk.server.nodes.UaNodeContext 13 | import org.eclipse.milo.opcua.sdk.server.nodes.UaVariableNode 14 | import org.eclipse.milo.opcua.stack.core.BuiltinDataType 15 | import org.eclipse.milo.opcua.stack.core.Identifiers 16 | import org.eclipse.milo.opcua.stack.core.types.builtin.* 17 | import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UShort 18 | import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.Unsigned 19 | import java.util.* 20 | 21 | class OpcUaNamespace(server: MiloOpcUaServer) : AddressSpaceComposite(server), Namespace, Lifecycle { 22 | 23 | companion object { 24 | const val NAMESPACE_URI: String = "urn:rocworks:automation-gateway:opcua:server" 25 | } 26 | 27 | private val lifecycleManager = LifecycleManager() 28 | 29 | private val namespaceIndex: UShort 
= server.namespaceTable.addUri(NAMESPACE_URI)
30 | 
31 |     init {
32 |         lifecycleManager.addLifecycle(object : Lifecycle {
33 |             override fun startup() {
34 |                 server.addressSpaceManager.register(this@OpcUaNamespace)
35 |             }
36 | 
37 |             override fun shutdown() {
38 |                 server.addressSpaceManager.unregister(this@OpcUaNamespace)
39 |             }
40 |         })
41 |     }
42 | 
43 |     override fun startup() {
44 |         lifecycleManager.startup()
45 |     }
46 | 
47 |     override fun shutdown() {
48 |         lifecycleManager.shutdown()
49 |     }
50 | 
51 |     override fun getNamespaceUri(): String {
52 |         return NAMESPACE_URI
53 |     }
54 | 
55 |     override fun getNamespaceIndex(): UShort {
56 |         return namespaceIndex
57 |     }
58 | }
--------------------------------------------------------------------------------
/source/lib-core/src/main/kotlin/at/rocworks/gateway/core/opcua/server/OpcUaSampledNode.kt:
--------------------------------------------------------------------------------
1 | package at.rocworks.gateway.core.opcua.server
2 | 
3 | import at.rocworks.gateway.core.service.ComponentLogger
4 | import io.vertx.core.Vertx
5 | import org.eclipse.milo.opcua.sdk.server.AbstractLifecycle
6 | import org.eclipse.milo.opcua.sdk.server.api.DataItem
7 | import org.eclipse.milo.opcua.sdk.server.nodes.AttributeContext
8 | import org.eclipse.milo.opcua.sdk.server.nodes.UaNode
9 | import org.eclipse.milo.opcua.stack.core.StatusCodes
10 | import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue
11 | import org.eclipse.milo.opcua.stack.core.types.enumerated.TimestampsToReturn
12 | 
13 | class OpcUaSampledNode(
14 |     private val vertx: Vertx,
15 |     private val item: DataItem,
16 |     private val node: UaNode
17 | ) : AbstractLifecycle() {
18 |     private val logger = ComponentLogger.getLogger(this::class.java.simpleName)
19 | 
20 |     @Volatile
21 |     var samplingEnabled: Boolean = true
22 | 
23 |     private var tick: Long = 0L
24 | 
25 |     override fun onStartup() {
26 |         item.setValue(sampleCurrentValue())
27 |         logger.finest { "onStartup: ${node.nodeId} ${item.samplingInterval}" }
28 |         tick = vertx.setPeriodic(item.samplingInterval.toLong()) {
29 |             tick()
30 |         }
31 |     }
32 | 
33 |     override fun onShutdown(): Unit = synchronized(this) {
34 |         logger.finest { "onShutdown: ${node.nodeId}" }
35 |         vertx.cancelTimer(tick)
36 |     }
37 | 
38 |     private fun tick() {
39 |         if (samplingEnabled) {
40 |             try {
41 |                 item.setValue(sampleCurrentValue())
42 |             } catch (t: Throwable) {
43 |                 logger.severe("Error sampling value for ${item.readValueId}: $t")
44 |                 item.setValue(DataValue(StatusCodes.Bad_InternalError))
45 |             }
46 |         }
47 |     }
48 | 
49 |     fun modifyRate(newRate: Double) {
50 |         logger.fine { "modifyRate: ${node.nodeId} $newRate" }
51 |         vertx.cancelTimer(tick)
52 |         tick = vertx.setPeriodic(newRate.toLong()) { // remember the new timer id so it can still be cancelled on shutdown
53 |             tick()
54 |         }
55 |     }
56 | 
57 |     private fun sampleCurrentValue(): DataValue {
58 |         return node.readAttribute(
59 |             AttributeContext(item.session.server),
60 |             item.readValueId.attributeId,
61 |             TimestampsToReturn.Both,
62 |             item.readValueId.indexRange,
63 |             item.readValueId.dataEncoding
64 |         )
65 |     }
66 | }
67 | 
--------------------------------------------------------------------------------
/source/lib-core/src/main/kotlin/at/rocworks/gateway/core/opcua/server/OpcUaSampledSpace.kt:
--------------------------------------------------------------------------------
1 | package at.rocworks.gateway.core.opcua.server
2 | 
3 | import at.rocworks.gateway.core.opcua.OpcUaServer
4 | import at.rocworks.gateway.core.service.ComponentLogger
5 | import com.google.common.collect.Maps
6 | import 
org.eclipse.milo.opcua.sdk.server.OpcUaServer as MiloOpcUaServer 7 | import org.eclipse.milo.opcua.sdk.server.api.* 8 | import org.eclipse.milo.opcua.sdk.server.nodes.UaNode 9 | import org.eclipse.milo.opcua.stack.core.types.builtin.NodeId 10 | import java.util.concurrent.ConcurrentMap 11 | 12 | abstract class OpcUaSampledSpace( 13 | private val config: OpcUaServer, 14 | server: MiloOpcUaServer, 15 | composite: AddressSpaceComposite 16 | ) : ManagedAddressSpaceFragmentWithLifecycle(server, composite) { 17 | private val logger = ComponentLogger.getLogger(this::class.java.simpleName) 18 | 19 | private val filter = SimpleAddressSpaceFilter.create { 20 | nodeManager.containsNode(it) 21 | } 22 | 23 | private val sampledNodes: ConcurrentMap = Maps.newConcurrentMap() 24 | 25 | override fun getFilter(): AddressSpaceFilter { 26 | return filter 27 | } 28 | 29 | override fun onDataItemsCreated(items: List) { 30 | items.forEach { item -> 31 | logger.fine { "onDataItemsCreated: ${item.readValueId.nodeId}" } 32 | val nodeId: NodeId = item.readValueId.nodeId 33 | val node: UaNode? = nodeManager.get(nodeId) 34 | 35 | if (node != null) { 36 | val sampledNode = OpcUaSampledNode(config.vertx, item, node) 37 | sampledNode.samplingEnabled = item.isSamplingEnabled 38 | sampledNode.startup() 39 | 40 | sampledNodes[item] = sampledNode 41 | } 42 | } 43 | } 44 | 45 | override fun onDataItemsModified(items: List) { 46 | items.forEach { item -> 47 | logger.fine { "onDataItemsModified: ${item.readValueId.nodeId}" } 48 | sampledNodes[item]?.modifyRate(item.samplingInterval) 49 | } 50 | } 51 | 52 | override fun onDataItemsDeleted(items: List) { 53 | items.forEach { item -> 54 | logger.fine { "onDataItemsDeleted: ${item.readValueId.nodeId}" } 55 | sampledNodes.remove(item)?.shutdown() 56 | } 57 | } 58 | 59 | override fun onMonitoringModeChanged(items: List) { 60 | items.forEach { item -> 61 | logger.fine { "onMonitoringModeChanged: ${item.readValueId.nodeId} ${item.isSamplingEnabled}" } 62 | sampledNodes[item]?.samplingEnabled = item.isSamplingEnabled 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/opcua/server/OpcUaServerLimits.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.opcua.server 2 | 3 | import org.eclipse.milo.opcua.sdk.server.api.config.OpcUaServerConfigLimits 4 | import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger 5 | import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.Unsigned.uint 6 | 7 | 8 | object OpcUaServerLimits : OpcUaServerConfigLimits { 9 | 10 | override fun getMaxSessionCount(): UInteger { 11 | return uint(1000) 12 | } 13 | 14 | override fun getMinPublishingInterval(): Double { 15 | return 100.0 16 | } 17 | 18 | override fun getDefaultPublishingInterval(): Double { 19 | return 100.0 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/service/Component.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.service 2 | 3 | import at.rocworks.gateway.core.graphql.GraphQLServer 4 | import at.rocworks.gateway.core.mqtt.MqttDriver 5 | import at.rocworks.gateway.logger.MqttLogger 6 | import at.rocworks.gateway.core.mqtt.MqttServer 7 | import at.rocworks.gateway.core.opcua.OpcUaDriver 8 | import 
at.rocworks.gateway.core.opcua.OpcUaServer 9 | import io.vertx.core.AbstractVerticle 10 | import io.vertx.core.json.JsonObject 11 | 12 | abstract class Component(val config: JsonObject) : AbstractVerticle() { 13 | enum class ComponentStatus { 14 | None, Enabled, Disabled 15 | } 16 | 17 | enum class ComponentGroup { 18 | None, Server, Driver, Logger 19 | } 20 | 21 | enum class ComponentType { 22 | None, 23 | GraphQLServer, MqttServer, OpcUaServer, 24 | OpcUaDriver, MqttDriver, Plc4xDriver, 25 | InfluxDBLogger, 26 | QuestDBLogger, 27 | IoTDBLogger, 28 | JdbcLogger, 29 | KafkaLogger, 30 | MqttLogger, 31 | Neo4jLogger, 32 | OpenSearchLogger, 33 | ImplyLogger 34 | } 35 | 36 | fun getComponentType() : ComponentType = ComponentType.valueOf(this.javaClass.simpleName) 37 | abstract fun getComponentGroup(): ComponentGroup 38 | abstract fun getComponentId(): String 39 | abstract fun getComponentConfig(): JsonObject 40 | 41 | fun getComponentName() = this.javaClass.simpleName+(if (getComponentId().isNotEmpty()) "/${getComponentId()}" else "") 42 | 43 | open fun getComponentStatus(): ComponentStatus = ComponentStatus.None 44 | 45 | override fun start() { 46 | super.start() 47 | ComponentHandler.componentStarted(this) // TODO: do this via message bus? 48 | } 49 | 50 | override fun stop() { 51 | super.stop() 52 | ComponentHandler.componentStopped(this) // TODO: do this via message bus? 53 | } 54 | 55 | companion object { 56 | fun defaultFactory(type: Component.ComponentType, config: JsonObject): Component? { 57 | return when (type) { 58 | ComponentType.GraphQLServer -> GraphQLServer(config) 59 | ComponentType.OpcUaServer -> OpcUaServer(config) 60 | ComponentType.OpcUaDriver -> OpcUaDriver(config) 61 | ComponentType.MqttServer -> MqttServer(config) 62 | ComponentType.MqttDriver -> MqttDriver(config) 63 | ComponentType.MqttLogger -> MqttLogger(config) 64 | else -> null 65 | } 66 | } 67 | } 68 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/service/ComponentHandler.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.service 2 | 3 | import io.vertx.core.Vertx 4 | import io.vertx.core.json.JsonArray 5 | import io.vertx.core.json.JsonObject 6 | import java.util.* 7 | import java.util.logging.Logger 8 | 9 | class ComponentHandler( 10 | private val vertx : Vertx, 11 | private val config: JsonObject, 12 | private val factory: (Component.ComponentType, JsonObject) -> Component?) 13 | { 14 | data class ComponentRecord( 15 | val id: String, 16 | val type: Component.ComponentType, 17 | val config: JsonObject, 18 | var component: Component? 
19 | ) { 20 | fun isEnabled(): Boolean = config.getBoolean("Enabled", false) 21 | val group: Component.ComponentGroup 22 | get() = when (type) { 23 | Component.ComponentType.GraphQLServer, 24 | Component.ComponentType.OpcUaServer, 25 | Component.ComponentType.MqttServer -> Component.ComponentGroup.Server 26 | Component.ComponentType.OpcUaDriver, 27 | Component.ComponentType.MqttDriver, 28 | Component.ComponentType.Plc4xDriver -> Component.ComponentGroup.Driver 29 | Component.ComponentType.InfluxDBLogger, 30 | Component.ComponentType.QuestDBLogger, 31 | Component.ComponentType.IoTDBLogger, 32 | Component.ComponentType.JdbcLogger, 33 | Component.ComponentType.KafkaLogger, 34 | Component.ComponentType.MqttLogger, 35 | Component.ComponentType.Neo4jLogger, 36 | Component.ComponentType.OpenSearchLogger, 37 | Component.ComponentType.ImplyLogger -> Component.ComponentGroup.Logger 38 | Component.ComponentType.None -> Component.ComponentGroup.None 39 | } 40 | } 41 | 42 | companion object { 43 | private val logger = Logger.getLogger(ComponentHandler::class.java.simpleName) 44 | private val components = mutableMapOf() 45 | 46 | fun componentStarted(component: Component) { 47 | logger.info("Component ${component.getComponentName()} started.") 48 | } 49 | 50 | fun componentStopped(component: Component) { 51 | logger.info("Component ${component.getComponentName()} stopped.") 52 | } 53 | } 54 | 55 | init { 56 | listOf("Drivers", "Servers", "Loggers").forEach { group -> 57 | config.getJsonObject(group) 58 | ?.filter { it.value is JsonArray } 59 | ?.forEach { (type, list) -> 60 | (list as JsonArray) 61 | .filterIsInstance() 62 | .forEach { config -> 63 | val name = type + (group.removeSuffix("s")) 64 | val componentType = try { 65 | Component.ComponentType.valueOf(name) 66 | } catch (e: IllegalArgumentException) { 67 | logger.severe("Unknown component type [$name]!") 68 | Component.ComponentType.None 69 | } 70 | createComponent(componentType, config) 71 | } 72 | } 73 | } 74 | } 75 | 76 | fun getConfig(): JsonObject { 77 | val drivers = JsonObject() 78 | val servers = JsonObject() 79 | val loggers = JsonObject() 80 | 81 | fun add(group: JsonObject, component: ComponentRecord) { 82 | val key = component.type.name.removeSuffix(component.group.name) 83 | if (group.containsKey(key)) 84 | group.getJsonArray(key).add(component.config) 85 | else 86 | group.put(key, JsonArray().add(component.config)) 87 | } 88 | 89 | components.forEach { (_, component) -> 90 | when (component.group) { 91 | Component.ComponentGroup.Server -> add(servers, component) 92 | Component.ComponentGroup.Driver -> add(drivers, component) 93 | Component.ComponentGroup.Logger -> add(loggers, component) 94 | else -> TODO() 95 | } 96 | } 97 | 98 | return JsonObject() 99 | .put("Drivers", drivers) 100 | .put("Servers", servers) 101 | .put("Loggers", loggers) 102 | } 103 | 104 | fun getComponents(): List { 105 | return components.map { it.value } 106 | } 107 | 108 | fun createComponent(type: Component.ComponentType, config: JsonObject) { 109 | val enabled = config.getBoolean("Enabled", true) 110 | config.put("Enabled", enabled) 111 | 112 | // Get component id 113 | val componentId = config.getString("Id", UUID.randomUUID().toString()) 114 | config.put("Id", componentId) 115 | 116 | // Add component 117 | if (type != Component.ComponentType.None) { 118 | val component = if (enabled) { factory(type, config) } else null 119 | val record = ComponentRecord(componentId, type, config, component) 120 | components["$type/$componentId"] = record 121 | if 
(enabled) deployComponent(type, componentId) 122 | } 123 | } 124 | 125 | fun deleteComponent(type: Component.ComponentType, id: String) { 126 | val key = "$type/$id" 127 | val record = components[key] 128 | if (record != null) { 129 | undeployComponent(type, id) 130 | components.remove(key) 131 | } 132 | } 133 | 134 | fun deployComponent(type: Component.ComponentType, id: String) { 135 | var record = components["$type/$id"] 136 | if (record != null && record.component != null) { 137 | val component = record.component!! 138 | vertx.deployVerticle(component) { result -> 139 | if (result.succeeded()) { 140 | logger.info("Component ${component.getComponentName()} started successfully") 141 | } else { 142 | logger.severe("Failed to start component: " + result.cause()) 143 | } 144 | } 145 | } 146 | } 147 | 148 | fun undeployComponent(type: Component.ComponentType, id: String) { 149 | val record = components["$type/$id"] 150 | if (record != null && record.component != null) { 151 | vertx.undeploy(record.component!!.deploymentID()) { result -> 152 | if (result.succeeded()) { 153 | logger.info("Component stopped successfully") 154 | } else { 155 | logger.severe("Failed to stop component: " + result.cause()) 156 | } 157 | } 158 | } 159 | } 160 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/service/ComponentLogger.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.service 2 | 3 | import java.util.LinkedList 4 | import java.util.concurrent.ConcurrentLinkedQueue 5 | import java.util.logging.Handler 6 | import java.util.logging.LogRecord 7 | import java.util.logging.Logger 8 | 9 | class ComponentLogger(private val className: String, private val id: String) : Handler() { 10 | private val max = 1000 11 | private val log = ConcurrentLinkedQueue() 12 | 13 | companion object { 14 | private val loggers = mutableMapOf() 15 | 16 | private fun getId(className: String, id: String="") = "$className"+(if (id.isNotEmpty()) "/$id" else "") 17 | 18 | fun getLogger(className: String, id: String=""): Logger { 19 | val logger = Logger.getLogger(getId(className, id)) 20 | val memoryHandler = ComponentLogger(className, id) 21 | logger.addHandler(memoryHandler) 22 | return logger 23 | } 24 | 25 | private fun addLogger(className: String, id: String, logger: ComponentLogger) = loggers.put(getId(className, id), logger) 26 | private fun delLogger(className: String, id: String) = loggers.remove(getId(className, id)) 27 | 28 | fun getMessages(className: String, id: String, last: Int): List { 29 | return loggers[getId(className, id)]?.getMessages(last) ?: listOf() 30 | } 31 | } 32 | 33 | init { 34 | addLogger(className, id, this) 35 | } 36 | 37 | override fun publish(record: LogRecord) { 38 | log.add(record) 39 | if (log.size > max) 40 | log.remove() 41 | } 42 | 43 | override fun flush() { 44 | log.clear() 45 | } 46 | 47 | override fun close() { 48 | log.clear() 49 | delLogger(className, id) 50 | } 51 | 52 | fun getMessages(last: Int): List { 53 | return if (last<=0) { 54 | log.toList() 55 | } 56 | else { 57 | log.toList().takeLast(last) 58 | } 59 | } 60 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/service/ServiceHandler.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.service 2 | 3 | import 
io.vertx.core.Future 4 | import io.vertx.core.Promise 5 | import io.vertx.core.Vertx 6 | import io.vertx.core.json.JsonObject 7 | import io.vertx.servicediscovery.Record 8 | import io.vertx.servicediscovery.ServiceDiscovery 9 | 10 | import java.util.logging.Logger 11 | 12 | class ServiceHandler(val vertx: Vertx, val logger: Logger) { 13 | private val discovery = ServiceDiscovery.create(vertx)!! 14 | private val observers : MutableMapUnit> = mutableMapOf() 15 | 16 | init { 17 | onServiceChanged { 18 | observers[idOf(it)]?.invoke(it) 19 | } 20 | } 21 | 22 | fun observeService(type: String, name: String, result: (record: Record)->Unit) { 23 | observers[idOf(type, name)] = result 24 | getServiceRecord(type, name).onComplete { 25 | if (it.result() != null) result(it.result()) 26 | } 27 | } 28 | 29 | fun unobserveService(type: String, name: String) { 30 | observers.remove(idOf(type, name)) 31 | } 32 | 33 | fun registerService(type: String, name: String, endpoint: String): Future { 34 | val promise = Promise.promise() 35 | val record = Record() 36 | .setType(type) 37 | .setName(name) 38 | .setLocation(JsonObject() 39 | .put("endpoint", endpoint)) 40 | discovery.publish(record, promise) 41 | return promise.future() 42 | } 43 | 44 | companion object { 45 | fun idOf(record: Record) = "${record.type}/${record.name}" 46 | fun idOf(type: String, name: String) = "$type/$name" 47 | fun endpointOf(record: Record) = record.location.getString("endpoint", "")!! 48 | } 49 | 50 | private fun onServiceChanged(result: (record: Record)->Unit) { 51 | vertx.eventBus().consumer(discovery.options().announceAddress) { message -> 52 | result(Record(message.body())) 53 | } 54 | } 55 | 56 | private fun getServiceRecord(type: String, name: String): Future { 57 | val promise = Promise.promise() 58 | discovery.getRecord({ r -> r.name == name && r.type == type }) { record -> 59 | if (record.succeeded() && record.result() != null) { 60 | promise.complete(record.result()) 61 | } else { 62 | promise.complete(null) 63 | } 64 | } 65 | return promise.future() 66 | } 67 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/core/service/WebConfig.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.core.service 2 | 3 | import io.vertx.core.AbstractVerticle 4 | import io.vertx.core.Vertx 5 | import io.vertx.core.http.HttpServer 6 | import io.vertx.ext.web.Router 7 | import io.vertx.ext.web.handler.BodyHandler 8 | import java.nio.file.Files 9 | import java.nio.file.Paths 10 | import java.nio.file.StandardCopyOption 11 | import java.util.logging.Logger 12 | 13 | class WebConfig(private val port: Int, configFileName: String) : AbstractVerticle() { 14 | private val logger = Logger.getLogger(javaClass.simpleName) 15 | private val configFileName = configFileName.substringBefore(".")+".yaml" // only YAML 16 | 17 | override fun start() { 18 | println("ConfigFileName $configFileName") 19 | createHttpServer(vertx, port) 20 | } 21 | 22 | override fun stop() { 23 | super.stop() 24 | } 25 | 26 | private fun ensureYamlExtension(configFileName: String): String { 27 | // Regex to check if the file has an extension 28 | val regex = Regex(".*\\.[a-zA-Z0-9]+$") 29 | 30 | return if (regex.matches(configFileName)) { 31 | // If the file has an extension, return it as is 32 | configFileName 33 | } else { 34 | // If the file does not have an extension, add .yaml to it 35 | "$configFileName.yaml" 36 | } 37 | } 38 
| 39 | private fun createHttpServer(vertx: Vertx, port: Int) : HttpServer { 40 | val index = """ 41 | 42 | 43 | 44 | 45 | File Upload 46 | 47 | 48 |

Upload Config File

49 |
50 | 51 | 52 |
53 | 54 | 55 | """.trimIndent() 56 | val router = Router.router(vertx) 57 | 58 | // Serve the static HTML page 59 | router.get("/").handler { ctx -> 60 | ctx.response() 61 | .putHeader("Content-Type", "text/html") 62 | .end(index) 63 | } 64 | 65 | // Handle file uploads 66 | router.route().handler(BodyHandler.create().setUploadsDirectory("config")) 67 | 68 | router.post("/upload").handler { ctx -> 69 | val upload = ctx.fileUploads().iterator().next() 70 | val uploadedFileName = upload.uploadedFileName() 71 | val fileName = upload.fileName() 72 | 73 | if (fileName.isEmpty()) { 74 | ctx.response() 75 | .putHeader("Content-Type", "text/html") 76 | .end(""" 77 | 78 | 79 |
Error: No file selected
80 | 81 | 82 | 83 | """.trimIndent()) 84 | } else { 85 | // Move the file to a new location 86 | val targetPath = Paths.get(ensureYamlExtension(configFileName)) 87 | Files.move(Paths.get(uploadedFileName), targetPath, StandardCopyOption.REPLACE_EXISTING) 88 | ctx.response() 89 | .putHeader("Content-Type", "text/html") 90 | .end(""" 91 | 92 | 93 |
File uploaded to ${targetPath.toAbsolutePath()}
94 | 95 | 96 | 97 | """.trimIndent()) 98 | } 99 | } 100 | 101 | val server = vertx.createHttpServer().requestHandler(router).listen(port) { 102 | if (it.succeeded()) { 103 | logger.info("HTTP Server started on port $port") 104 | } else { 105 | logger.warning("Failed to start HTTP Server: ${it.cause()}") 106 | } 107 | } 108 | 109 | return server 110 | } 111 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/logger/KafkaLogger.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.logger 2 | 3 | import at.rocworks.gateway.core.data.Topic 4 | import at.rocworks.gateway.core.logger.LoggerPublisher 5 | import io.vertx.core.Future 6 | import io.vertx.core.Promise 7 | import io.vertx.core.buffer.Buffer 8 | import io.vertx.core.json.JsonObject 9 | 10 | import io.vertx.kafka.client.producer.KafkaProducer 11 | import io.vertx.kafka.client.producer.KafkaProducerRecord 12 | 13 | class KafkaLogger(config: JsonObject) : LoggerPublisher(config) { 14 | private val servers = config.getString("Servers", "localhost:9092") 15 | private val configs = config.getJsonObject("Configs") 16 | private val topicName = config.getString("TopicName", "Gateway") 17 | private val keyName = config.getString("KeyName", null) 18 | 19 | @Volatile 20 | private var producer: KafkaProducer? = null 21 | 22 | override fun open(): Future { 23 | val result = Promise.promise() 24 | try { 25 | val config: MutableMap = HashMap() 26 | config["bootstrap.servers"] = servers 27 | 28 | config["key.serializer"] = "org.apache.kafka.common.serialization.StringSerializer" 29 | config["value.serializer"] = "org.apache.kafka.common.serialization.StringSerializer" 30 | 31 | configs?.forEach { 32 | logger.info("Kafka config: ${it.key}=${it.value}") 33 | config[it.key] = it.value.toString() 34 | } 35 | 36 | producer = KafkaProducer.create(vertx, config) 37 | logger.info("Kafka connected.") 38 | result.complete() 39 | } catch (e: Exception) { 40 | logger.severe("Kafka connect failed! 
[${e.message}]", ) 41 | e.printStackTrace() 42 | result.fail(e) 43 | } 44 | return result.future() 45 | } 46 | 47 | override fun close(): Future { 48 | val promise = Promise.promise() 49 | producer?.close() 50 | producer = null 51 | promise.complete() 52 | return promise.future() 53 | } 54 | 55 | override fun isEnabled(): Boolean { 56 | return producer != null 57 | } 58 | 59 | override fun publish(topic: Topic, payload: Buffer) { 60 | val destination = topicName?:topic.systemName 61 | val key = keyName?:topic.getBrowsePathOrNode().toString() 62 | val record = KafkaProducerRecord.create(destination, key, payload.toString()) 63 | producer?.write(record)?.onComplete { 64 | valueCounterOutput++ 65 | }?.onFailure { 66 | logger.severe("Error writing record [${it.message}]") 67 | } 68 | } 69 | 70 | override fun publish(topics: List, payload: Buffer) { 71 | val record = KafkaProducerRecord.create(topicName, payload.toString()) 72 | producer?.write(record)?.onComplete { 73 | valueCounterOutput+=topics.size 74 | }?.onFailure { 75 | logger.severe("Error writing record [${it.message}]") 76 | } 77 | } 78 | } -------------------------------------------------------------------------------- /source/lib-core/src/main/kotlin/at/rocworks/gateway/logger/MqttLogger.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.logger 2 | 3 | import at.rocworks.gateway.core.data.Topic 4 | import at.rocworks.gateway.core.logger.LoggerPublisher 5 | import io.netty.handler.codec.mqtt.MqttQoS 6 | import io.vertx.core.Future 7 | import io.vertx.core.Promise 8 | import io.vertx.core.buffer.Buffer 9 | import io.vertx.core.json.JsonObject 10 | import io.vertx.mqtt.MqttClient 11 | import io.vertx.mqtt.MqttClientOptions 12 | 13 | import java.util.* 14 | 15 | class MqttLogger (config: JsonObject) : LoggerPublisher(config) { 16 | private var client : MqttClient? = null 17 | 18 | private val configMqtt = config.getJsonObject("Mqtt", config) 19 | private val port: Int = configMqtt.getInteger("Port", 1883) 20 | private val host: String = configMqtt.getString("Host", "localhost") 21 | private val username: String? = configMqtt.getString("Username") 22 | private val password: String? 
= configMqtt.getString("Password") 23 | private val clientId: String = configMqtt.getString("ClientId", UUID.randomUUID().toString()) 24 | private val cleanSession: Boolean = configMqtt.getBoolean("CleanSession", true) 25 | private val ssl: Boolean = configMqtt.getBoolean("Ssl", false) 26 | private val trustAll: Boolean = configMqtt.getBoolean("TrustAll", true) 27 | private val qos: Int = configMqtt.getInteger("Qos", 0) 28 | private val retained: Boolean = configMqtt.getBoolean("Retained", false) 29 | private val baseTopic: String = configMqtt.getString("Topic", "") 30 | private val maxMessageSizeKb = configMqtt.getInteger("MaxMessageSizeKb", 8) * 1024 31 | 32 | private var enabled = false 33 | 34 | private val topicToTarget : Map = 35 | topicsWithConfig 36 | .filter { it.second.getString("Target") != null } 37 | .associate { it.first.topicName to it.second.getString("Target") } 38 | 39 | override fun open(): Future { 40 | val promise = Promise.promise() 41 | 42 | if (client==null) { 43 | enabled = true 44 | val options = MqttClientOptions() 45 | username?.let { options.username = it } 46 | password?.let { options.password = it } 47 | options.setClientId(clientId) 48 | options.setCleanSession(cleanSession) 49 | options.setSsl(ssl) 50 | options.setTrustAll(trustAll) 51 | options.setMaxMessageSize(maxMessageSizeKb) 52 | client = MqttClient.create(vertx, options) 53 | client!!.closeHandler { 54 | logger.severe("Connection closed!") 55 | if (enabled) reconnect() 56 | } 57 | client!!.exceptionHandler { 58 | logger.severe(it.stackTraceToString()) 59 | } 60 | } 61 | 62 | client!!.connect(port, host) { 63 | logger.info("Mqtt client connect [${it.succeeded()}] [${it.cause() ?: ""}]") 64 | if (it.succeeded()) { 65 | logger.fine { "Connected to MQTT broker." } 66 | promise.complete() 67 | } 68 | else promise.fail("Connect failed!") 69 | } ?: promise.fail("Client is null!") 70 | 71 | return promise.future() 72 | } 73 | 74 | override fun close(): Future { 75 | val promise = Promise.promise() 76 | enabled = false 77 | client!!.disconnect { 78 | promise.complete() 79 | logger.info("Mqtt client disconnect [${it.succeeded()}]") 80 | } 81 | return promise.future() 82 | } 83 | 84 | override fun isEnabled(): Boolean { 85 | return enabled 86 | } 87 | 88 | private fun publish(topic: String, payload: Buffer) { 89 | client!!.publish(topic, payload, MqttQoS.valueOf(qos), false, retained) 90 | valueCounterOutput+=1 91 | } 92 | 93 | override fun publish(topic: Topic, payload: Buffer) { 94 | logger.fine { "Produce External: $topic" } 95 | val targetTopic = if (topicToTarget.containsKey(topic.topicName)) { 96 | val target = topicToTarget[topic.topicName]!! 
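                // Explanatory note on the destination mapping below: if the configured "Target" ends with an
                // MQTT wildcard ("#" or "+"), the wildcard is replaced with the part of the browse path that
                // extends beyond the subscribed wildcard path, or with the plain node id otherwise; a Target
                // without a wildcard is used verbatim. Topics without a Target fall back to
                // [BaseTopic/]systemName/browsePath (or node) in the else branches further down.
                // Illustration only, with assumed values: a Target of "scada/#" and a remaining browse-path
                // suffix of "HMI_Tag_3" would publish to "scada/HMI_Tag_3".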
97 | if (target.endsWith("#") || target.endsWith("+")) { 98 | if (topic.hasBrowsePath && topic.topicPath.endsWith("#") || topic.topicPath.endsWith("+")) { 99 | target.dropLast(1) + topic.getBrowsePathOrNode().toString().removePrefix(topic.topicPath.dropLast(1)) 100 | } else { 101 | target.dropLast(1) + topic.topicNode 102 | } 103 | } 104 | else target 105 | } else if (this.baseTopic.isEmpty()) { 106 | topic.systemName + "/" + topic.getBrowsePathOrNode() 107 | } else { 108 | this.baseTopic + "/" + topic.systemName + "/" + topic.getBrowsePathOrNode() 109 | } 110 | publish(targetTopic, payload) 111 | } 112 | 113 | override fun publish(topics: List, payload: Buffer) { 114 | publish(this.baseTopic, payload) 115 | } 116 | } -------------------------------------------------------------------------------- /source/lib-influxdb/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-library-conventions' 3 | } 4 | 5 | dependencies { 6 | api project(':lib-core') 7 | 8 | implementation "io.vertx:vertx-core:$vertxVersion" 9 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 10 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 11 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 12 | 13 | implementation "org.influxdb:influxdb-java:2.24" 14 | implementation "com.influxdb:influxdb-client-java:6.6.0" 15 | } 16 | -------------------------------------------------------------------------------- /source/lib-influxdb/src/main/kotlin/at/rocworks/gateway/logger/influx/InfluxDBLogger.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.logger.influx 2 | 3 | import at.rocworks.gateway.core.logger.LoggerBase 4 | import io.vertx.core.json.JsonObject 5 | 6 | class InfluxDBLogger { 7 | companion object { 8 | fun create(config: JsonObject): LoggerBase { 9 | val version = config.getInteger("Version") 10 | return when (version) { 11 | 1 -> InfluxDBLoggerV1(config) 12 | 2 -> InfluxDBLoggerV2(config) 13 | else -> throw IllegalArgumentException("Unknown InfluxDBLogger version: $version") 14 | } 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /source/lib-influxdb/src/main/kotlin/at/rocworks/gateway/logger/influx/InfluxDBLoggerV1.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.logger.influx 2 | 3 | import at.rocworks.gateway.core.data.DataPoint 4 | import at.rocworks.gateway.core.logger.LoggerBase 5 | import io.vertx.core.Future 6 | import io.vertx.core.Promise 7 | 8 | import io.vertx.core.json.JsonObject 9 | import java.util.concurrent.TimeUnit 10 | 11 | import org.influxdb.BatchOptions 12 | import org.influxdb.InfluxDB 13 | import org.influxdb.InfluxDBFactory 14 | import org.influxdb.dto.* 15 | import java.util.concurrent.Callable 16 | 17 | 18 | class InfluxDBLoggerV1(config: JsonObject) : LoggerBase(config) { 19 | private val url = config.getString("Url", "") 20 | private val username = config.getString("Username", "") 21 | private val password = config.getString("Password", "") 22 | private val database = config.getString("Database", "test") 23 | 24 | private var enabled = false 25 | 26 | private var session: InfluxDB? 
= null 27 | 28 | private fun connect() = if (username == null || username == "") 29 | InfluxDBFactory.connect(url) 30 | else 31 | InfluxDBFactory.connect(url, username, password) 32 | 33 | override fun open(): Future { 34 | val result = Promise.promise() 35 | vertx.executeBlocking(Callable { 36 | try { 37 | connect().let { 38 | session = it 39 | val response: Pong = it.ping() 40 | if (!response.isGood) { 41 | enabled = true 42 | result.complete() 43 | } else { 44 | it.setLogLevel(InfluxDB.LogLevel.NONE) 45 | it.query(Query("CREATE DATABASE $database")) 46 | it.setDatabase(database) 47 | var options = BatchOptions.DEFAULTS 48 | options = options.bufferLimit(writeParameterBlockSize) 49 | it.enableBatch(options) 50 | logger.info("InfluxDB connected.") 51 | enabled = true 52 | result.complete() 53 | } 54 | } 55 | } catch (e: Exception) { 56 | logger.severe("InfluxDB connect failed! [${e.message}]") 57 | enabled = false 58 | result.fail(e) 59 | } 60 | }) 61 | return result.future() 62 | } 63 | 64 | override fun close(): Future { 65 | val promise = Promise.promise() 66 | session?.close() 67 | enabled = false 68 | promise.complete() 69 | return promise.future() 70 | } 71 | 72 | override fun isEnabled(): Boolean { 73 | return enabled 74 | } 75 | 76 | private fun influxPointOf(dp: DataPoint): Point { 77 | val point = Point.measurement(dp.topic.systemName) // TODO: configurable measurement name 78 | .time(dp.value.sourceTime().toEpochMilli(), TimeUnit.MILLISECONDS) 79 | .tag("tag", dp.topic.getBrowsePathOrNode().toString()) // TODO: add topicName, topicType, topicNode, ... 80 | .tag("address", dp.topic.topicNode) 81 | .tag("status", dp.value.statusAsString()) 82 | 83 | val numeric: Double? = dp.value.valueAsDouble() 84 | if (numeric != null) { 85 | //logger.debug("topic [$topic] numeric [$numeric]") 86 | point.addField("value", numeric) 87 | } else { 88 | //logger.debug("topic [$topic] text [${value.valueAsString()}]") 89 | point.addField("text", dp.value.valueAsString()) 90 | } 91 | 92 | return point.build() 93 | } 94 | 95 | override fun writeExecutor() { 96 | val batch = BatchPoints.database(database).build() 97 | pollDatapointBlock { 98 | batch.point(influxPointOf(it)) 99 | } 100 | if (batch.points.size > 0) { 101 | try { 102 | session?.write(batch) 103 | commitDatapointBlock() 104 | valueCounterOutput+=batch.points.size 105 | } catch (e: Exception) { 106 | logger.severe("Error writing batch [${e.message}]") 107 | } 108 | } 109 | } 110 | 111 | override fun queryExecutor( 112 | system: String, 113 | nodeId: String, 114 | fromTimeMS: Long, 115 | toTimeMS: Long, 116 | result: (Boolean, List>?) 
-> Unit 117 | ) { 118 | val fromTimeNano = fromTimeMS * 1_000_000 119 | val toTimeNano = toTimeMS * 1_000_000 120 | try { 121 | val data = session?.let { s -> 122 | val sql = """ 123 | SELECT time, servertime, value, text, status 124 | FROM "$system" 125 | WHERE "address" = '$nodeId' 126 | AND time >= $fromTimeNano AND time <= $toTimeNano 127 | """.trimIndent() 128 | s.query(Query(sql)).let { query -> 129 | query.results.getOrNull(0) 130 | ?.series?.getOrNull(0) 131 | ?.values?.map { 132 | listOf( 133 | it.component1(), 134 | it.component2(), 135 | it.component3() ?: it.component4(), 136 | it.component5() 137 | ) 138 | } 139 | } 140 | } 141 | result(data != null, data) 142 | } catch (e: Exception) { 143 | logger.severe("Error executing query [${e.message}]") 144 | result(false, null) 145 | } 146 | } 147 | } -------------------------------------------------------------------------------- /source/lib-influxdb/src/main/kotlin/at/rocworks/gateway/logger/influx/InfluxDBLoggerV2.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.logger.influx 2 | 3 | import at.rocworks.gateway.core.data.DataPoint 4 | import at.rocworks.gateway.core.logger.LoggerBase 5 | import com.influxdb.LogLevel 6 | import com.influxdb.client.InfluxDBClient 7 | import com.influxdb.client.InfluxDBClientFactory 8 | import com.influxdb.client.domain.WritePrecision 9 | import com.influxdb.client.write.Point 10 | 11 | import io.vertx.core.Future 12 | import io.vertx.core.Promise 13 | import io.vertx.core.json.JsonObject 14 | import java.util.concurrent.Callable 15 | 16 | class InfluxDBLoggerV2(config: JsonObject) : LoggerBase(config) { 17 | private val url = config.getString("Url", "") 18 | private val username = config.getString("Username", "") 19 | private val password = config.getString("Password", "") 20 | private val token = config.getString("Token", "") 21 | private val org = config.getString("Org", "") 22 | private val bucket = config.getString("Bucket", "") 23 | private val measurement = config.getString("Measurement", "") 24 | 25 | private var enabled = false 26 | 27 | private var session: InfluxDBClient? = null 28 | 29 | private fun connect() = if (username.isEmpty() && token.isEmpty()) 30 | InfluxDBClientFactory.create(url) 31 | else if (username.isNotEmpty()) 32 | InfluxDBClientFactory.create(url, username, password.toCharArray()) 33 | else if (token.isNotEmpty()) 34 | InfluxDBClientFactory.create(url, token.toCharArray(), org, bucket) 35 | else null 36 | 37 | override fun open(): Future { 38 | val result = Promise.promise() 39 | vertx.executeBlocking(Callable { 40 | try { 41 | connect()?.let { 42 | session = it 43 | val response = it.ping() 44 | if (response) { 45 | enabled = true 46 | result.complete() 47 | } else { 48 | it.logLevel = LogLevel.NONE 49 | logger.info("InfluxDB connected.") 50 | enabled = true 51 | result.complete() 52 | } 53 | } 54 | } catch (e: Exception) { 55 | logger.severe("InfluxDB connect failed! 
[${e.message}]") 56 | enabled = false 57 | result.fail(e) 58 | } 59 | }) 60 | return result.future() 61 | } 62 | 63 | override fun close(): Future { 64 | val promise = Promise.promise() 65 | session?.close() 66 | enabled = false 67 | promise.complete() 68 | return promise.future() 69 | } 70 | 71 | override fun isEnabled(): Boolean { 72 | return enabled 73 | } 74 | 75 | private fun influxPointOf(dp: DataPoint): Point { 76 | val point = Point.measurement(if (measurement.isNotEmpty()) measurement else dp.topic.systemName) 77 | .time(dp.value.sourceTime(), WritePrecision.MS) 78 | .addTag("tag", dp.topic.getBrowsePathOrNode().toString()) // TODO: add topicName, topicType, topicNode, ... 79 | .addTag("address", dp.topic.topicNode) 80 | .addTag("status", dp.value.statusAsString()) 81 | 82 | val numeric: Double? = dp.value.valueAsDouble() 83 | if (numeric != null) { 84 | //logger.debug("topic [$topic] numeric [$numeric]") 85 | point.addField("value", numeric) 86 | } else { 87 | //logger.debug("topic [$topic] text [${value.valueAsString()}]") 88 | point.addField("text", dp.value.valueAsString()) 89 | } 90 | 91 | return point 92 | } 93 | 94 | override fun writeExecutor() { 95 | session?.writeApiBlocking?.let { api -> 96 | val batch = mutableListOf() 97 | pollDatapointBlock { 98 | batch.add(influxPointOf(it)) 99 | } 100 | if (batch.isNotEmpty()) { 101 | try { 102 | api.writePoints(batch) 103 | commitDatapointBlock() 104 | valueCounterOutput+=batch.size 105 | } catch (e: Exception) { 106 | logger.severe("Error writing batch [${e.message}]") 107 | commitDatapointBlock() // TODO: check the exact error, it could be that only some points are not written, but it could also be that the whole batch is not written 108 | } 109 | } 110 | } 111 | } 112 | 113 | override fun queryExecutor( 114 | system: String, 115 | nodeId: String, 116 | fromTimeMS: Long, 117 | toTimeMS: Long, 118 | result: (Boolean, List>?) 
-> Unit 119 | ) { 120 | val fromTimeNano = fromTimeMS * 1_000_000 121 | val toTimeNano = toTimeMS * 1_000_000 122 | try { 123 | session?.queryApi?.let { api -> 124 | val data = api.query( 125 | """ 126 | from(bucket: "$bucket") 127 | |> range(start: $fromTimeNano, stop: $toTimeNano) 128 | |> filter(fn: (r) => r["_measurement"] == "$system") 129 | |> filter(fn: (r) => r["address"] == "$nodeId") 130 | |> keep(columns: ["_time", "servertime", "_value", "text", "status"]) 131 | """.trimIndent() 132 | ).firstOrNull()?.let { table -> 133 | table.records.map { record -> 134 | listOf( 135 | record.getValueByKey("_time"), 136 | record.getValueByKey("servertime"), 137 | record.getValueByKey("_value"), 138 | record.getValueByKey("text"), 139 | record.getValueByKey("status") 140 | ) 141 | } 142 | } 143 | result(data != null, data) 144 | } ?: result(false, null) 145 | } catch (e: Exception) { 146 | logger.severe("Error executing query [${e.message}]") 147 | result(false, null) 148 | } 149 | } 150 | } -------------------------------------------------------------------------------- /source/lib-iotdb/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-library-conventions' 3 | } 4 | 5 | dependencies { 6 | api project(':lib-core') 7 | 8 | implementation "io.vertx:vertx-core:$vertxVersion" 9 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 10 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 11 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 12 | 13 | implementation 'org.apache.iotdb:iotdb-session:1.3.0' 14 | } 15 | -------------------------------------------------------------------------------- /source/lib-neo4j/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-library-conventions' 3 | } 4 | 5 | dependencies { 6 | api project(':lib-core') 7 | 8 | implementation "io.vertx:vertx-core:$vertxVersion" 9 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 10 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 11 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 12 | implementation 'org.neo4j.driver:neo4j-java-driver:5.14.0' 13 | } -------------------------------------------------------------------------------- /source/lib-opensearch/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-library-conventions' 3 | } 4 | 5 | dependencies { 6 | api project(':lib-core') 7 | 8 | implementation "io.vertx:vertx-core:$vertxVersion" 9 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 10 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 11 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 12 | implementation 'org.opensearch.client:opensearch-java:2.10.1' 13 | implementation "org.opensearch.client:opensearch-rest-client:2.13.0" 14 | } 15 | -------------------------------------------------------------------------------- /source/lib-opensearch/src/main/kotlin/at/rocworks/gateway/logger/opensearch/OpenSearchLogger.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.logger.opensearch 2 | 3 | import at.rocworks.gateway.core.data.Topic 4 | import at.rocworks.gateway.core.logger.LoggerBase 5 | import io.vertx.core.Future 6 | import io.vertx.core.Promise 7 | import io.vertx.core.json.JsonObject 8 | import java.time.YearMonth 9 | 
import org.apache.http.HttpHost 10 | import org.apache.http.auth.AuthScope 11 | import org.apache.http.auth.UsernamePasswordCredentials 12 | import org.apache.http.impl.client.BasicCredentialsProvider 13 | import org.opensearch.client.RestClient 14 | import org.opensearch.client.json.jackson.JacksonJsonpMapper 15 | import org.opensearch.client.opensearch.OpenSearchClient 16 | import org.opensearch.client.opensearch.core.BulkRequest 17 | import org.opensearch.client.opensearch.core.bulk.BulkOperation 18 | import org.opensearch.client.opensearch.core.bulk.IndexOperation 19 | import org.opensearch.client.transport.OpenSearchTransport 20 | import org.opensearch.client.transport.rest_client.RestClientTransport 21 | import java.time.format.DateTimeFormatter 22 | 23 | /* Create an index template "" with an index pattern "-*" with the following JSON index mapping 24 | { 25 | "properties": { 26 | "topicName": { "type": "text" }, 27 | "systemType": { "type": "text" }, 28 | "systemName": { "type": "text" }, 29 | "topicType": { "type": "text" }, 30 | "topicPath": { "type": "text" }, 31 | "topicNode": { "type": "text" }, 32 | "browsePath": { "type": "text" }, 33 | "valueAsString": { "type": "text" }, 34 | "valueAsNumber": { "type": "double" }, 35 | "statusCode": { "type": "text" }, 36 | "sourceTime": { "type": "date" }, 37 | "serverTime": { "type": "date" } 38 | } 39 | } 40 | */ 41 | 42 | class OpenSearchLogger(config: JsonObject) : LoggerBase(config) { 43 | data class IndexData( 44 | val topicName: String, 45 | val systemType: Topic.SystemType, 46 | val systemName: String, 47 | val topicType: Topic.TopicType, 48 | val topicPath: String, 49 | val topicNode: String, 50 | val browsePath: String, 51 | val valueAsString: String, 52 | val valueAsNumber: Double?, 53 | val statusCode: String, 54 | val sourceTime: Long, 55 | val serverTime: Long 56 | ) 57 | 58 | private val host = config.getString("Host", "http://localhost") 59 | private val port = config.getInteger("Port", 9200) 60 | private val index = config.getString("Index", "gateway") 61 | private val username = config.getString("Username", "") 62 | private val password = config.getString("Password", "") 63 | 64 | private val httpHost = HttpHost(host, port) 65 | private val credentialsProvider = BasicCredentialsProvider() 66 | 67 | //Initialize the client 68 | private var client : OpenSearchClient? = null 69 | 70 | override fun open(): Future { 71 | val result = Promise.promise() 72 | try { 73 | val restClient = RestClient.builder(httpHost).setHttpClientConfigCallback { httpClientBuilder -> 74 | httpClientBuilder.setDefaultCredentialsProvider( 75 | credentialsProvider 76 | ) 77 | }.build() 78 | val transport = RestClientTransport(restClient, JacksonJsonpMapper()) 79 | credentialsProvider.setCredentials( 80 | AuthScope(httpHost), 81 | UsernamePasswordCredentials(username, password) 82 | ) 83 | client = OpenSearchClient(transport) 84 | logger.info("OpenSearch connected.") 85 | result.complete() 86 | } catch (e: Exception) { 87 | logger.severe("OpenSearch connect failed! 
[${e.message}]") 88 | client = null 89 | e.printStackTrace() 90 | result.fail(e) 91 | } 92 | return result.future() 93 | } 94 | 95 | override fun close(): Future { 96 | val promise = Promise.promise() 97 | client = null 98 | promise.complete() 99 | return promise.future() 100 | } 101 | 102 | override fun isEnabled(): Boolean { 103 | return client != null 104 | } 105 | 106 | override fun writeExecutor() { 107 | val indexCurrent = index+"-"+YearMonth.now().format(DateTimeFormatter.ofPattern("yyyy-MM")) 108 | val bulkOperations : MutableList = mutableListOf() 109 | 110 | pollDatapointBlock { point -> 111 | val value = point.value.valueAsDouble() 112 | val data = IndexData( 113 | point.topic.topicName, 114 | point.topic.systemType, 115 | point.topic.systemName, 116 | point.topic.topicType, 117 | point.topic.topicPath, 118 | point.topic.topicNode, 119 | point.topic.getBrowsePathOrNode().toString(), 120 | point.value.valueAsString(), 121 | if (value == null || value.isNaN()) null else value, 122 | point.value.statusCode, 123 | point.value.sourceTime.toEpochMilli(), 124 | point.value.serverTime.toEpochMilli() 125 | ) 126 | val indexOperation = IndexOperation.Builder().index(indexCurrent).document(data).build() 127 | bulkOperations.add(BulkOperation.Builder().index(indexOperation).build()) 128 | } 129 | if (bulkOperations.size > 0) { 130 | try { 131 | client?.let { session -> 132 | val result = session.bulk(BulkRequest.Builder().operations(bulkOperations).build()) 133 | if (result.errors()) { 134 | logger.severe("Bulk had some errors...") 135 | for (item in result.items()) { 136 | if (item.error() != null) { 137 | logger.fine(item.error()!!.reason()) 138 | } 139 | } 140 | } 141 | commitDatapointBlock() 142 | valueCounterOutput += bulkOperations.size 143 | } 144 | } catch (e: Exception) { 145 | logger.severe("Error writing batch [${e.message}]") 146 | } 147 | } 148 | 149 | } 150 | } -------------------------------------------------------------------------------- /source/lib-plc4x/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-library-conventions' 3 | } 4 | 5 | dependencies { 6 | api project(':lib-core') 7 | 8 | implementation "io.vertx:vertx-core:$vertxVersion" 9 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 10 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 11 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 12 | 13 | implementation group: 'org.apache.plc4x', name: 'plc4j-api', version: '0.9.0' 14 | } 15 | -------------------------------------------------------------------------------- /source/lib-plc4x/src/main/kotlin/Plc4xMonitoredItem.kt: -------------------------------------------------------------------------------- 1 | import at.rocworks.gateway.core.data.Topic 2 | import at.rocworks.gateway.core.driver.MonitoredItem 3 | import org.apache.plc4x.java.api.model.PlcSubscriptionHandle 4 | 5 | class Plc4xMonitoredItem(override val item: PlcSubscriptionHandle) : MonitoredItem() { 6 | } 7 | 8 | class Plc4xPolledItem(override val item: Topic) : MonitoredItem() { 9 | } -------------------------------------------------------------------------------- /source/lib-plc4x/src/main/resources/logging.properties: -------------------------------------------------------------------------------- 1 | handlers = java.util.logging.ConsoleHandler, java.util.logging.FileHandler 2 | java.util.logging.ConsoleHandler.level = ALL 3 | java.util.logging.ConsoleHandler.formatter = 
java.util.logging.SimpleFormatter 4 | java.util.logging.SimpleFormatter.format=[%1$tF %1$tT][%4$-7s][%3$-30.30s] %5$s %n 5 | 6 | #java.util.logging.FileHandler.level = ALL 7 | #java.util.logging.FileHandler.pattern = gateway.log.%g 8 | #java.util.logging.FileHandler.limit = 50000 9 | #java.util.logging.FileHandler.count = 1 10 | #java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter 11 | #java.util.logging.FileHandler.format=[%1$tF %1$tT][%4$-7s][%3$-50.50s] %5$s %n 12 | -------------------------------------------------------------------------------- /source/lib-questdb/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-library-conventions' 3 | } 4 | 5 | dependencies { 6 | api project(':lib-core') 7 | 8 | implementation "io.vertx:vertx-core:$vertxVersion" 9 | implementation "io.vertx:vertx-lang-kotlin:$vertxVersion" 10 | implementation "io.vertx:vertx-config-yaml:$vertxVersion" 11 | implementation "io.vertx:vertx-service-discovery:$vertxVersion" 12 | 13 | implementation 'org.questdb:questdb:8.1.2' 14 | } 15 | -------------------------------------------------------------------------------- /source/lib-questdb/src/main/kotlin/at/rocworks/gateway/logger/questdb/QuestDBLogger.kt: -------------------------------------------------------------------------------- 1 | package at.rocworks.gateway.logger.questdb 2 | 3 | import at.rocworks.gateway.core.logger.LoggerBase 4 | import io.vertx.core.Future 5 | import io.vertx.core.Promise 6 | 7 | import io.vertx.core.json.JsonObject 8 | 9 | import io.questdb.client.Sender 10 | 11 | /* 12 | CREATE TABLE gateway ( 13 | time timestamp, 14 | system symbol, 15 | address symbol, 16 | value double, 17 | text varchar 18 | ) TIMESTAMP(time) PARTITION BY DAY; 19 | 20 | ALTER TABLE gateway DEDUP ENABLE UPSERT KEYS(time, system, address) 21 | */ 22 | 23 | class QuestDBLogger(config: JsonObject) : LoggerBase(config) { 24 | private val url = config.getString("Config", "http::addr=localhost:9000;") 25 | private val table = config.getString("Table", "gateway") 26 | private val autoFlush = config.getBoolean("AutoFlush", false) 27 | 28 | @Volatile 29 | private var sender : Sender? = null 30 | 31 | override fun open(): Future { 32 | val result = Promise.promise() 33 | try { 34 | logger.info("QuestDB connect to $url") 35 | sender = Sender.fromConfig(url) 36 | logger.info("QuestDB connected.") 37 | result.complete() 38 | } catch (e: Exception) { 39 | logger.severe("QuestDB connect failed! 
[${e.message}]") 40 | result.fail(e) 41 | } 42 | return result.future() 43 | } 44 | 45 | override fun close(): Future { 46 | val promise = Promise.promise() 47 | sender?.close() 48 | sender = null 49 | promise.complete() 50 | return promise.future() 51 | } 52 | 53 | override fun isEnabled(): Boolean { 54 | return sender != null 55 | } 56 | 57 | override fun writeExecutor() { 58 | try { 59 | val size = pollDatapointBlock { point -> 60 | val address = point.topic.getBrowsePathOrNode().toString() 61 | val value = point.value.valueAsDouble() ?: Double.NaN 62 | val text = point.value.stringValue() 63 | sender?.table(table) 64 | ?.symbol("system", point.topic.systemName) 65 | ?.symbol("address", address) 66 | ?.symbol("status", point.value.statusCode) 67 | ?.doubleColumn("value", value) 68 | ?.stringColumn("text", text) 69 | ?.at(point.value.sourceTime) 70 | } 71 | if (size > 0) { 72 | try { 73 | if (!autoFlush) sender?.flush() 74 | commitDatapointBlock() // with autoFlush==true it is possible to lose values if the connection gets broken!! 75 | valueCounterOutput += size 76 | } catch (e: Exception) { 77 | logger.severe("Error writing batch [${e.message}]") 78 | e.printStackTrace() 79 | } 80 | } 81 | } catch (e: Exception) { 82 | logger.severe("Error writing batch [${e.message}]") 83 | e.printStackTrace() 84 | } 85 | } 86 | } -------------------------------------------------------------------------------- /source/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'gateway' 2 | 3 | include('app', 'app-plc4x', 'test') 4 | 5 | include('lib-core', 6 | 'lib-plc4x', 7 | 'lib-influxdb', 8 | 'lib-questdb', 9 | 'lib-iotdb', 10 | 'lib-neo4j', 11 | 'lib-opensearch') 12 | 13 | dependencyResolutionManagement { 14 | repositories { 15 | mavenLocal() 16 | maven { 17 | url = uri('https://repo.maven.apache.org/maven2/') 18 | } 19 | } 20 | } -------------------------------------------------------------------------------- /source/test/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'gateway.kotlin-application-conventions' 3 | } 4 | 5 | dependencies { 6 | implementation project(':lib-core') 7 | implementation 'io.vertx:vertx-core:4.0.3' 8 | implementation 'io.vertx:vertx-config-yaml:4.0.3' 9 | implementation 'io.vertx:vertx-service-discovery:4.0.3' 10 | } 11 | 12 | application { 13 | // Define the main class for the application. 
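    // Note: 'Test' is the Kotlin object declared in test/src/main/kotlin/Test.kt (included further below in this listing).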
14 | mainClass = 'Test' 15 | } 16 | -------------------------------------------------------------------------------- /source/test/src/main/kotlin/Test.kt: -------------------------------------------------------------------------------- 1 | import at.rocworks.gateway.core.data.* 2 | 3 | import kotlin.Throws 4 | import kotlin.jvm.JvmStatic 5 | 6 | import io.vertx.core.Vertx 7 | import io.vertx.core.json.Json 8 | import io.vertx.core.json.JsonArray 9 | import io.vertx.core.json.JsonObject 10 | 11 | import java.lang.Exception 12 | import java.util.logging.LogManager 13 | import kotlin.system.exitProcess 14 | 15 | object Test { 16 | 17 | @Throws(Exception::class) 18 | @JvmStatic 19 | fun main(args: Array) { 20 | val stream = Test::class.java.classLoader.getResourceAsStream("logging.properties") 21 | try { 22 | LogManager.getLogManager().readConfiguration(stream) 23 | } catch (e: Exception) { 24 | println("Error loading logging.properties!") 25 | exitProcess(-1) 26 | } 27 | 28 | val vertx = Vertx.vertx() 29 | 30 | // Register Message Types 31 | vertx.eventBus().registerDefaultCodec(Topic::class.java, 32 | CodecTopic() 33 | ) 34 | 35 | println("Hello Automation Gateway!") 36 | TestTopics.main() 37 | } 38 | } -------------------------------------------------------------------------------- /source/test/src/main/kotlin/TestTopics.kt: -------------------------------------------------------------------------------- 1 | import at.rocworks.gateway.core.data.Topic 2 | 3 | object TestTopics { 4 | val dollar = "\$" 5 | 6 | fun main() { 7 | listOf( 8 | "opc/unified/node/1/16.687.1.0.0.0", 9 | "opc/unified/node:Value/1/16.687.1.0.0.0", 10 | "opc/unified/node:Pretty/1/16.687.1.0.0.0", 11 | "opc/unified/symbol/HMI_Tag_2", 12 | "opc/unified/path/Tags/HMI_Tag_3", 13 | "opc/oa/node:Value/2/ExampleDP_Float.ExampleDP_Arg1", 14 | "opc/oa/node:value/2/ExampleDP_Float.ExampleDP_Arg1", 15 | "opc/oa/node:Json/2/ExampleDP_Float.ExampleDP_Arg1", 16 | "opc/oa/node:json/2/ExampleDP_Float.ExampleDP_Arg1", 17 | "opc/oa/rpc/d490352d-4142-4729-aab2-2f0101f4701e", 18 | "\$SYS/broker/uptime" 19 | ).forEach { 20 | val t = Topic.parseTopic(it) 21 | println("--- " + t.topicName + " ---") 22 | if (t.isValid()) { 23 | println("parsedTopic : " + t.toString()) 24 | val j = Topic.encodeToJson(t) 25 | println("encodeToJson : " + j.toString()) 26 | val x = Topic.decodeFromJson(j) 27 | println("decodeFromJson: " + x.toString()) 28 | } else { 29 | println("Invalid! " + t.topicName) 30 | } 31 | } 32 | } 33 | } -------------------------------------------------------------------------------- /source/test/src/main/resources/logging.properties: -------------------------------------------------------------------------------- 1 | handlers = java.util.logging.ConsoleHandler, java.util.logging.FileHandler 2 | java.util.logging.ConsoleHandler.level = ALL 3 | java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter 4 | java.util.logging.SimpleFormatter.format=[%1$tF %1$tT][%4$-7s][%3$-30.30s] %5$s %n 5 | 6 | #java.util.logging.FileHandler.level = ALL 7 | #java.util.logging.FileHandler.pattern = gateway.log.%g 8 | #java.util.logging.FileHandler.limit = 50000 9 | #java.util.logging.FileHandler.count = 1 10 | #java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter 11 | #java.util.logging.FileHandler.format=[%1$tF %1$tT][%4$-7s][%3$-50.50s] %5$s %n 12 | --------------------------------------------------------------------------------