├── docker
│   ├── data
│   │   └── logFilePaths
│   ├── X2Stream.jar
│   ├── dbmanager-entrypoint.sh
│   ├── Dockerfile.file2stream
│   ├── anomalies.sql
│   ├── Dockerfile.mysql
│   ├── Dockerfile.dbserver
│   ├── Dockerfile.logflash
│   ├── build.sh
│   ├── wait-for.sh
│   ├── docker-compose.yml
│   ├── docker-entrypoint.sh
│   └── resources
│       └── config.properties
├── src
│   └── main
│       ├── java
│       │   ├── humanfeedback
│       │   │   ├── FeedBackMonitor.java
│       │   │   ├── FeedBackFalseAlarms.java
│       │   │   ├── SuspiciousRegion.java
│       │   │   ├── TuningRegion.java
│       │   │   └── SuspiciousRegionMonitor.java
│       │   ├── TCFGmodel
│       │   │   ├── FileUtil.java
│       │   │   ├── TCFG.java
│       │   │   ├── TCFGConstructor.java
│       │   │   ├── ShareMemory.java
│       │   │   └── TCFGUtil.java
│       │   ├── modelconstruction
│       │   │   ├── MatrixUpdater.java
│       │   │   ├── MatrixUpdaterMode1.java
│       │   │   ├── MatrixUpdaterMode3.java
│       │   │   ├── MatrixTriple.java
│       │   │   ├── MetricsMonitoring.java
│       │   │   ├── IndenpendencyFilter.java
│       │   │   ├── TransferParamMatrix.java
│       │   │   └── MatrixUpdaterMode2.java
│       │   ├── workflow
│       │   │   ├── WorkFlow.java
│       │   │   ├── Config.java
│       │   │   ├── WorkFlowMode1.java
│       │   │   ├── WatermarkGenerator.java
│       │   │   ├── WorkFlowMode2.java
│       │   │   ├── Controller.java
│       │   │   └── CommandListener.java
│       │   ├── faultdiagnosis
│       │   │   ├── FaultDiagnosis.java
│       │   │   ├── FaultDiagnosisMode3.java
│       │   │   ├── Anomaly.java
│       │   │   ├── FaultDiagnosisMode1.java
│       │   │   └── FaultDiagnosisMode2.java
│       │   ├── templatemining
│       │   │   ├── LogCluster.java
│       │   │   ├── Node.java
│       │   │   ├── Parse.java
│       │   │   └── LogParser.java
│       │   ├── dao
│       │   │   ├── AnomalyJSON.java
│       │   │   └── MysqlUtil.java
│       │   └── Entrance.java
│       └── resources
│           ├── log4j.properties
│           └── config.properties
├── README.md
└── pom.xml
--------------------------------------------------------------------------------
/docker/data/logFilePaths:
--------------------------------------------------------------------------------
/data/adc-06-04-2019-2_20k
--------------------------------------------------------------------------------
/docker/X2Stream.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jia-tong-FINE/LogFlash/HEAD/docker/X2Stream.jar
--------------------------------------------------------------------------------
/docker/dbmanager-entrypoint.sh:
--------------------------------------------------------------------------------
#!/bin/sh
sh /wait-for.sh mysql:3306
java -jar /opt/logflashdb.jar
--------------------------------------------------------------------------------
/docker/Dockerfile.file2stream:
--------------------------------------------------------------------------------
FROM openjdk:11.0.7-jre-slim
ADD X2Stream.jar /opt
CMD ["java", "-jar", "/opt/X2Stream.jar"]
--------------------------------------------------------------------------------
/docker/anomalies.sql:
--------------------------------------------------------------------------------
DROP SCHEMA IF EXISTS anomalies;
CREATE SCHEMA anomalies;
USE anomalies;
DROP TABLE IF EXISTS `anomaly_log`;
--------------------------------------------------------------------------------
/src/main/java/humanfeedback/FeedBackMonitor.java:
--------------------------------------------------------------------------------
package humanfeedback;

public interface FeedBackMonitor {

    void addToWhiteList(String eventID);

}
--------------------------------------------------------------------------------
/docker/Dockerfile.mysql:
--------------------------------------------------------------------------------
FROM mysql

WORKDIR /

ENV MYSQL_ROOT_PASSWORD jt1118961

COPY ./anomalies.sql /docker-entrypoint-initdb.d

ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]

CMD ["mysqld"]
"/entrypoint.sh"] 10 | 11 | CMD ["mysqld"] -------------------------------------------------------------------------------- /docker/Dockerfile.dbserver: -------------------------------------------------------------------------------- 1 | FROM openjdk:8-jre-alpine 2 | ADD logflashdb.jar /opt 3 | 4 | COPY dbmanager-entrypoint.sh / 5 | COPY wait-for.sh / 6 | Run chmod 775 /dbmanager-entrypoint.sh 7 | Run chmod 775 /wait-for.sh 8 | 9 | ENTRYPOINT ["/dbmanager-entrypoint.sh"] 10 | CMD ["--help"] -------------------------------------------------------------------------------- /src/main/java/TCFGmodel/FileUtil.java: -------------------------------------------------------------------------------- 1 | package TCFGmodel; 2 | 3 | import java.io.File; 4 | 5 | public class FileUtil { 6 | 7 | public static void CreateDir(String sp) { 8 | File file=new File(sp); 9 | if(!file.exists()){ 10 | file.mkdir(); 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/main/java/modelconstruction/MatrixUpdater.java: -------------------------------------------------------------------------------- 1 | package modelconstruction; 2 | 3 | import org.apache.flink.api.java.tuple.Tuple7; 4 | 5 | import java.util.List; 6 | 7 | public interface MatrixUpdater { 8 | 9 | double calGradientForInfected(long a, long b, TransferParamMatrix c, List d, long e, long f); 10 | 11 | double calGradientForUninfected(long a, long b, TransferParamMatrix c, List d, long e); 12 | 13 | 14 | } 15 | -------------------------------------------------------------------------------- /src/main/java/workflow/WorkFlow.java: -------------------------------------------------------------------------------- 1 | package workflow; 2 | 3 | 4 | import org.apache.flink.api.java.utils.ParameterTool; 5 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 6 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 7 | 8 | public interface WorkFlow { 9 | void workflow(StreamExecutionEnvironment env, DataStreamSource dataStream, ParameterTool parameter) throws Exception; 10 | } 11 | -------------------------------------------------------------------------------- /src/main/java/workflow/Config.java: -------------------------------------------------------------------------------- 1 | package workflow; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | public class Config { 7 | public static Map parameter; 8 | public static Map valueStates = new HashMap<>(); 9 | 10 | static { 11 | //reserve a flag for each valueState 12 | valueStates.put("transferParamMatrix", 0); 13 | valueStates.put("tcfgValueState", 0); 14 | valueStates.put("parseTree", 0); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/workflow/WorkFlowMode1.java: -------------------------------------------------------------------------------- 1 | package workflow; 2 | 3 | 4 | import org.apache.flink.api.java.utils.ParameterTool; 5 | import org.apache.flink.streaming.api.datastream.DataStreamSource; 6 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 7 | 8 | public class WorkFlowMode1 implements WorkFlow{ 9 | @Override 10 | public void workflow(StreamExecutionEnvironment env, DataStreamSource dataStream, ParameterTool parameter) throws Exception{ 11 | 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/main/java/modelconstruction/MatrixUpdaterMode1.java: 
--------------------------------------------------------------------------------
package modelconstruction;

import org.apache.flink.api.java.tuple.Tuple7;

import java.util.List;

public class MatrixUpdaterMode1 implements MatrixUpdater {

    @Override
    public double calGradientForInfected(long a, long b, TransferParamMatrix c, List<Tuple7<String, String, String, String, String, String, String>> d, long e, long f) {
        return 0;
    }

    @Override
    public double calGradientForUninfected(long a, long b, TransferParamMatrix c, List<Tuple7<String, String, String, String, String, String, String>> d, long e) {
        return 0;
    }
}
--------------------------------------------------------------------------------
/src/main/java/modelconstruction/MatrixUpdaterMode3.java:
--------------------------------------------------------------------------------
package modelconstruction;

import org.apache.flink.api.java.tuple.Tuple7;

import java.util.List;

public class MatrixUpdaterMode3 implements MatrixUpdater {

    @Override
    public double calGradientForInfected(long a, long b, TransferParamMatrix c, List<Tuple7<String, String, String, String, String, String, String>> d, long e, long f) {
        return 0;
    }

    @Override
    public double calGradientForUninfected(long a, long b, TransferParamMatrix c, List<Tuple7<String, String, String, String, String, String, String>> d, long e) {
        return 0;
    }
}
--------------------------------------------------------------------------------
/src/main/java/humanfeedback/FeedBackFalseAlarms.java:
--------------------------------------------------------------------------------
package humanfeedback;

import faultdiagnosis.Anomaly;

import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;

public class FeedBackFalseAlarms {

    private Queue<Anomaly> falseAlarms = new LinkedBlockingQueue<>();

    public Anomaly getAnomalyFromFalseAlarms() {
        return falseAlarms.poll();
    }

    public void addAnomalyToFalseAlarms(Anomaly anomaly) {
        falseAlarms.offer(anomaly);
    }

    public boolean isEmpty() {
        return falseAlarms.isEmpty();
    }

}
--------------------------------------------------------------------------------
/src/main/java/faultdiagnosis/FaultDiagnosis.java:
--------------------------------------------------------------------------------
package faultdiagnosis;

import TCFGmodel.TCFG;
import org.apache.flink.api.java.tuple.Tuple7;

import java.util.List;
import java.util.Map;

public interface FaultDiagnosis {

    double calProbability(double ti, double tj, double alphaji, long timeWindow, long delta);

    double calProbabilityOfCurrentEntry(List<Tuple7<String, String, String, String, String, String, String>> logList, Map<String, Map<String, Double>> paramMatrix, long timeWindow, long delta);

    List<Tuple7<String, String, String, String, String, String, String>> detectSuspiciousRequest(TCFG tcfg, List<Tuple7<String, String, String, String, String, String, String>> logList);

    Anomaly faultDiagnosisProcess(TCFG tcfg, List<Tuple7<String, String, String, String, String, String, String>> tempList);
}
--------------------------------------------------------------------------------
/src/main/java/humanfeedback/SuspiciousRegion.java:
--------------------------------------------------------------------------------
package humanfeedback;

import faultdiagnosis.Anomaly;

import java.util.LinkedList;
import java.util.Queue;

public class SuspiciousRegion {

    public Queue<Anomaly> sequenceAnomalyQueue = new LinkedList<>();
    public Queue<Anomaly> latencyAnomalyQueue = new LinkedList<>();
    public Queue<Anomaly> redundancyAnomalyQueue = new LinkedList<>();

    public boolean isEmpty() {
        return sequenceAnomalyQueue.isEmpty() && latencyAnomalyQueue.isEmpty() && redundancyAnomalyQueue.isEmpty();
    }

}
--------------------------------------------------------------------------------
/src/main/java/modelconstruction/MatrixTriple.java:
--------------------------------------------------------------------------------
package modelconstruction;

public class MatrixTriple {

    double value;
    long timeWeight;

    public MatrixTriple(double value, long timeWeight) {
        this.value = value;
        this.timeWeight = timeWeight;
    }

    public void setValue(double value) {
        this.value = value;
    }

    public void setTimeWeight(long timeWeight) {
        this.timeWeight = timeWeight;
    }

    public double getValue() {
        return value;
    }

    public long getTimeWeight() {
        return timeWeight;
    }
}
--------------------------------------------------------------------------------
/src/main/java/templatemining/LogCluster.java:
--------------------------------------------------------------------------------
package templatemining;

import java.io.Serializable;
import java.util.List;

class LogCluster implements Serializable {
    public List<String> logTemplate;
    public String eventID;

    public LogCluster() {
    }

    LogCluster(List<String> logTemplate, String eventID) {
        this.logTemplate = logTemplate;
        this.eventID = eventID;
    }

    List<String> getLogTemplate() {
        return logTemplate;
    }

    void setLogTemplate(List<String> logTemplate) {
        this.logTemplate = logTemplate;
    }

    String getEventID() {
        return eventID;
    }

    void setEventID(String eventID) {
        this.eventID = eventID;
    }
}
--------------------------------------------------------------------------------
/src/main/java/faultdiagnosis/FaultDiagnosisMode3.java:
--------------------------------------------------------------------------------
package faultdiagnosis;

import TCFGmodel.TCFG;
import org.apache.flink.api.java.tuple.Tuple7;

import java.util.List;
import java.util.Map;

public class FaultDiagnosisMode3 implements FaultDiagnosis {

    @Override
    public double calProbability(double ti, double tj, double alphaji, long timeWindow, long delta) {
        return 0;
    }

    @Override
    public double calProbabilityOfCurrentEntry(List<Tuple7<String, String, String, String, String, String, String>> logList, Map<String, Map<String, Double>> paramMatrix, long timeWindow, long delta) {
        return 0;
    }

    @Override
    public List<Tuple7<String, String, String, String, String, String, String>> detectSuspiciousRequest(TCFG tcfg, List<Tuple7<String, String, String, String, String, String, String>> logList) {
        return null;
    }

    @Override
    public Anomaly faultDiagnosisProcess(TCFG tcfg, List<Tuple7<String, String, String, String, String, String, String>> tempList) {
        return null;
    }
}
--------------------------------------------------------------------------------
/src/main/java/humanfeedback/TuningRegion.java:
--------------------------------------------------------------------------------
package humanfeedback;

import faultdiagnosis.Anomaly;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

public class TuningRegion {

    Queue<Anomaly> anomalyQueue = new LinkedList<>();
    List<String> eventWhiteList = new ArrayList<>();

    public void addAnomalyToQueue(Anomaly e) {
        anomalyQueue.offer(e);
    }

    public Anomaly pollAnomalyFromQueue() {
        return anomalyQueue.poll();
    }

    public Anomaly showAnomalyFromQueue() {
        return anomalyQueue.element();
    }

    public void addEventToWhiteList(String eventID) {
        eventWhiteList.add(eventID);
    }

    public void deleteEventFromWhiteList(String eventID) {
        eventWhiteList.remove(eventID);
    }

    public boolean isEmpty() {
        return anomalyQueue.isEmpty();
    }

    public List<String> getEventWhiteList() {
        return eventWhiteList;
    }

}
--------------------------------------------------------------------------------
/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

log4j.rootLogger=INFO, console

log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %c - %m%n
--------------------------------------------------------------------------------
/src/main/java/templatemining/Node.java:
--------------------------------------------------------------------------------
package templatemining;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class Node implements Serializable {
    public int depth;
    public Map<String, Node> childD;
    public List<LogCluster> childLG;
    public String digitOrtoken;

    public Node() {
        childD = new HashMap<>();
        childLG = new ArrayList<>();
        depth = 0;
    }

    Node(int depth, String digitOrtoken) {
        childD = new HashMap<>();
        childLG = new ArrayList<>();
        this.depth = depth;
        this.digitOrtoken = digitOrtoken;
    }

    Map<String, Node> getChildD() {
        return childD;
    }

    List<LogCluster> getChildLG() {
        return childLG;
    }

    int getDepth() {
        return depth;
    }

    String getDigitOrtoken() {
        return digitOrtoken;
    }

    void setChildD(String seqLen, Node node) {
        childD.put(seqLen, node);
    }

    void setChildLG(LogCluster child) {
        childLG.add(child);
    }
}
--------------------------------------------------------------------------------
/docker/Dockerfile.logflash:
--------------------------------------------------------------------------------
FROM openjdk:8-jre-alpine

# Install requirements
RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.ustc.edu.cn/g' /etc/apk/repositories
RUN apk add bash snappy libc6-compat netcat-openbsd

# Flink environment variables
ENV FLINK_INSTALL_PATH=/opt
ENV FLINK_HOME $FLINK_INSTALL_PATH/flink
ENV FLINK_LIB_DIR $FLINK_HOME/lib
ENV FLINK_PLUGINS_DIR $FLINK_HOME/plugins
ENV FLINK_OPT_DIR $FLINK_HOME/opt
ENV FLINK_JOB_ARTIFACTS_DIR $FLINK_INSTALL_PATH/artifacts
ENV FLINK_USR_LIB_DIR $FLINK_HOME/usrlib
ENV PATH $PATH:$FLINK_HOME/bin

ARG flink_dist=NOT_SET
ARG job_artifacts=NOT_SET

# Install build dependencies and flink
ADD $flink_dist $FLINK_INSTALL_PATH/
ADD $job_artifacts/* $FLINK_JOB_ARTIFACTS_DIR/

RUN set -x && \
  ln -s $FLINK_INSTALL_PATH/flink-[0-9]* $FLINK_HOME && \
  ln -s $FLINK_JOB_ARTIFACTS_DIR $FLINK_USR_LIB_DIR && \
  addgroup -S flink && adduser -D -S -H -G flink -h $FLINK_HOME flink && \
  chown -R flink:flink ${FLINK_INSTALL_PATH}/flink-* && \
  chown -R flink:flink ${FLINK_JOB_ARTIFACTS_DIR}/ && \
  chown -h flink:flink $FLINK_HOME

COPY docker-entrypoint.sh /
COPY wait-for.sh /
RUN chmod 775 /docker-entrypoint.sh
RUN chmod 775 /wait-for.sh

USER flink
EXPOSE 8081 6123 30822 30833
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["--help"]
--------------------------------------------------------------------------------
/docker/build.sh:
--------------------------------------------------------------------------------
#!/bin/bash
usage() {
  cat <<EOF
Usage:
  build.sh --job-artifacts <job-artifacts> --flink-path <flink-dist> [--image-name <image-name>]
  build.sh --help

  If the --image-name flag is not used the built image name will be 'flink-job'.
EOF
  exit 1
}

while [[ $# -ge 1 ]]; do
  key="$1"
  case $key in
    --job-artifacts)
      JOB_ARTIFACTS_PATH="$2"
      shift
      ;;
    --flink-path)
      FLINK_PATH="$2"
      shift
      ;;
    --image-name)
      IMAGE_NAME="$2"
      shift
      ;;
    --help)
      usage
      ;;
    *)
      # unknown option
      ;;
  esac
  shift
done

IMAGE_NAME=${IMAGE_NAME:-flink-job}

# TMPDIR must be contained within the working directory so it is part of the
# Docker context. (i.e. it can't be mktemp'd in /tmp)
TMPDIR=_TMP_

cleanup() {
  rm -rf "${TMPDIR}"
}
trap cleanup EXIT
mkdir -p "${TMPDIR}"

JOB_ARTIFACTS_DIST="${TMPDIR}/artifacts"
mkdir -p ${JOB_ARTIFACTS_DIST}

job_artifacts_array=("${JOB_ARTIFACTS_PATH}")
for artifact in "${job_artifacts_array[@]}"; do
  cp "${artifact}" ${JOB_ARTIFACTS_DIST}/
done

if [ -n "${FLINK_PATH}" ]; then
  FLINK_DIST="${TMPDIR}/flink.tgz"
  cp "${FLINK_PATH}" "${FLINK_DIST}"
else
  usage
fi

if [ -d models ]; then
  echo dir exist
else
  mkdir models
  chmod 777 models
fi

docker build --build-arg flink_dist="${FLINK_DIST}" --build-arg job_artifacts="${JOB_ARTIFACTS_DIST}" -t "${IMAGE_NAME}" -f Dockerfile.logflash .
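
For reference, a typical invocation of build.sh, mirroring the README's deployment steps (the artifact and distribution file names are the ones used elsewhere in this repository):

```bash
# Build the LogFlash Flink job image from the project jar and a Flink distribution.
./build.sh --job-artifacts LogFlash-1.0-SNAPSHOT.jar \
           --flink-path flink-1.10.0-bin-scala_2.11.tgz \
           --image-name flink-job
```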
--------------------------------------------------------------------------------
/docker/wait-for.sh:
--------------------------------------------------------------------------------
#!/bin/bash

cmdname=$(basename "$0")
TIMEOUT=180
QUIET=0

echoerr() {
  if [ "$QUIET" -ne 1 ]; then printf "%s\n" "$*" 1>&2; fi
}

usage() {
  exitcode="$1"
  cat << USAGE >&2
Usage:
  $cmdname host:port [-t timeout] [-- command args]
  -q | --quiet                      Do not output any status messages
  -t TIMEOUT | --timeout=timeout    Timeout in seconds, zero for no timeout
  -- COMMAND ARGS                   Execute command with args after the test finishes
USAGE
  exit "$exitcode"
}

wait_for() {
  for i in `seq $TIMEOUT`; do
    nc -z "$HOST" "$PORT" > /dev/null 2>&1

    result=$?
    if [ $result -eq 0 ]; then
      if [ $# -gt 0 ]; then
        exec "$@"
      fi
      exit 0
    fi
    sleep 1
  done
  echo "Operation timed out" >&2
  exit 1
}

while [ $# -gt 0 ]
do
  case "$1" in
    *:* )
      HOST=$(printf "%s\n" "$1" | cut -d : -f 1)
      PORT=$(printf "%s\n" "$1" | cut -d : -f 2)
      shift 1
      ;;
    -q | --quiet)
      QUIET=1
      shift 1
      ;;
    -t)
      TIMEOUT="$2"
      if [ "$TIMEOUT" = "" ]; then break; fi
      shift 2
      ;;
    --timeout=*)
      TIMEOUT="${1#*=}"
      shift 1
      ;;
    --)
      shift
      break
      ;;
    --help)
      usage 0
      ;;
    *)
      echoerr "Unknown argument: $1"
      usage 1
      ;;
  esac
done

if [ "$HOST" = "" -o "$PORT" = "" ]; then
  echoerr "Error: you need to provide a host and port to test."
  usage 2
fi

wait_for "$@"
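
A quick usage sketch for wait-for.sh; the mysql:3306 address matches how the other entrypoint scripts in this directory call it, and the timeout and follow-up command are illustrative:

```bash
# Block until MySQL accepts connections (up to 60 s), then run the given command.
./wait-for.sh mysql:3306 -t 60 -- echo "mysql is up"
```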
--------------------------------------------------------------------------------
/docker/docker-compose.yml:
--------------------------------------------------------------------------------
# Docker compose file for a Flink job cluster deployment.
#
# Parameters:
# * FLINK_DOCKER_IMAGE_NAME - Image name to use for the deployment (default: flink-job:latest)
# * FLINK_JOB - Name of the Flink job to execute (default: none)
# * DEFAULT_PARALLELISM - Default parallelism with which to start the job (default: 1)
# * FLINK_JOB_ARGUMENTS - Additional arguments which will be passed to the job cluster (default: none)

version: "2.2"
services:
  jobmanager:
    image: ${FLINK_DOCKER_IMAGE_NAME:-flink-job}
    ports:
      - "30881:8081"
    hostname: jobmanager
    command: job-manager --job-classname ${FLINK_JOB} -Djobmanager.rpc.address=jobmanager -Dparallelism.default=${DEFAULT_PARALLELISM:-1} -Dlog.file=/opt/flink/log ${FLINK_JOB_ARGUMENTS}
    volumes:
      - ./resources:/opt/resources
      - ./models:/opt/models
    depends_on:
      - file2stream
      - mysql
  taskmanager:
    image: ${FLINK_DOCKER_IMAGE_NAME:-flink-job}
    ports:
      - "30822:30822"
    hostname: taskmanager
    command: task-manager -Djobmanager.rpc.address=jobmanager -Dlog.file=/opt/flink/log
    scale: ${DEFAULT_PARALLELISM:-1}
    volumes:
      - ./resources:/opt/resources
      - ./models:/opt/models
    depends_on:
      - file2stream
      - mysql
  mysql:
    image: logsql
    hostname: mysql
  dbserver:
    image: dbserver
    ports:
      - "30855:30855"
    hostname: dbserver
    depends_on:
      - mysql
  file2stream:
    image: file2stream
    ports:
      - "30833:30833"
      - "30837:30837"
    hostname: file2stream
    volumes:
      - ./data:/data
      - ./resources:/opt/resources
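
A minimal launch sketch using the parameters documented in the header above; the job class name Entrance comes from the README's deployment step, while the parallelism value is illustrative:

```bash
# Start the full stack with the LogFlash job and two task managers;
# the Flink web UI is then reachable on host port 30881.
FLINK_JOB=Entrance DEFAULT_PARALLELISM=2 docker-compose up -d
```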
--------------------------------------------------------------------------------
/docker/docker-entrypoint.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# If unspecified, the hostname of the container is taken as the JobManager address.
FLINK_HOME=${FLINK_HOME:-"/opt/flink"}
CONF_FILE="${FLINK_HOME}/conf/flink-conf.yaml"
LOG4J_FILE="${FLINK_HOME}/conf/log4j-console.properties"
JOB_CLUSTER="job-manager"
TASK_MANAGER="task-manager"

CMD="$1"
shift

if [ "${CMD}" == "--help" ] || [ "${CMD}" == "-h" ]; then
  echo "Usage: $(basename "$0") (${JOB_CLUSTER}|${TASK_MANAGER})"
  exit 0
elif [ "${CMD}" == "${JOB_CLUSTER}" ] || [ "${CMD}" == "${TASK_MANAGER}" ]; then
  echo "Starting the ${CMD}"
  ./wait-for.sh mysql:3306
  if [ "${CMD}" == "${TASK_MANAGER}" ]; then
    sed -i -e "s/log4j.rootLogger=INFO, console/log4j.rootLogger=INFO, console, file/g" ${LOG4J_FILE}
    echo -e "# Log all infos in the given file\nlog4j.appender.file=org.apache.log4j.FileAppender\nlog4j.appender.file.file=${FLINK_HOME}/log/output.log\nlog4j.appender.file.append=false\nlog4j.appender.file.layout=org.apache.log4j.PatternLayout\nlog4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n" >> ${LOG4J_FILE}
    echo -e "web.log.path: /opt/flink/log/output.log\ntaskmanager.log.path: /opt/flink/log/output.log" >> ${CONF_FILE}
    exec "$FLINK_HOME"/bin/taskmanager.sh start-foreground "$@"
  else
    sed -i -e "s/log4j.rootLogger=INFO, console/log4j.rootLogger=INFO, console, file/g" ${LOG4J_FILE}
    echo -e "# Log all infos in the given file\nlog4j.appender.file=org.apache.log4j.FileAppender\nlog4j.appender.file.file=${FLINK_HOME}/log/output.log\nlog4j.appender.file.append=false\nlog4j.appender.file.layout=org.apache.log4j.PatternLayout\nlog4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n" >> ${LOG4J_FILE}
    echo -e "web.log.path: /opt/flink/log/output.log\ntaskmanager.log.path: /opt/flink/log/output.log" >> ${CONF_FILE}
    exec "$FLINK_HOME"/bin/standalone-job.sh start-foreground "$@"
  fi
fi

exec "$@"
--------------------------------------------------------------------------------
/src/main/java/workflow/WatermarkGenerator.java:
--------------------------------------------------------------------------------
package workflow;

import org.apache.flink.api.java.tuple.Tuple7;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.watermark.Watermark;

import javax.annotation.Nullable;

public class WatermarkGenerator {

    public static class BoundedOutOfOrdernessGenerator implements AssignerWithPeriodicWatermarks<Tuple7<String, String, String, String, String, String, String>> {

        private long currentMaxTimestamp;

        @Nullable
        @Override
        public Watermark getCurrentWatermark() {
            ParameterTool parameterTool = ParameterTool.fromMap(Config.parameter);
            long maxOutOfOrderness = Long.parseLong(parameterTool.get("maxOutOfOrderness")); // timeWindow milliseconds
            return new Watermark(currentMaxTimestamp - maxOutOfOrderness);
        }

        @Override
        public long extractTimestamp(Tuple7<String, String, String, String, String, String, String> element, long previousElementTimestamp) {
            long timestamp = Long.parseLong(element.f0);
            currentMaxTimestamp = Math.max(timestamp, currentMaxTimestamp);
            return timestamp;
        }
    }

    public static class TimeLagWatermarkGenerator implements AssignerWithPeriodicWatermarks<Tuple7<String, String, String, String, String, String, String>> {

        ParameterTool parameterTool = ParameterTool.fromMap(Config.parameter);
        private final long maxTimeLag = Long.parseLong(parameterTool.get("timeSlag")); // timeSlag seconds

        @Nullable
        @Override
        public Watermark getCurrentWatermark() {
            return new Watermark(System.currentTimeMillis() - maxTimeLag);
        }

        @Override
        public long extractTimestamp(Tuple7<String, String, String, String, String, String, String> element, long previousElementTimestamp) {
            return Long.parseLong(element.f0);
        }
    }
}
--------------------------------------------------------------------------------
/src/main/java/faultdiagnosis/Anomaly.java:
--------------------------------------------------------------------------------
package faultdiagnosis;

import com.alibaba.fastjson.annotation.JSONField;
import org.apache.flink.api.java.tuple.Tuple7;

import java.util.List;

public class Anomaly {

    @JSONField(name = "LogID")
    private String anomalyLogId;
    @JSONField(name = "Log")
    private Tuple7<String, String, String, String, String, String, String> anomalyLog;
    @JSONField(name = "LogList")
    private List<Tuple7<String, String, String, String, String, String, String>> anomalyLogList;
    @JSONField(name = "Request")
    private List<Tuple7<String, String, String, String, String, String, String>> suspectedAnomalyRequest;
    @JSONField(name = "Type")
    private String anomalyType;

    Anomaly() {
    }

    public Anomaly(String anomalyLogId, Tuple7<String, String, String, String, String, String, String> anomalyLog, List<Tuple7<String, String, String, String, String, String, String>> anomalyLogList, List<Tuple7<String, String, String, String, String, String, String>> suspectedAnomalyRequest, String anomalyType) {
        this.anomalyLogId = anomalyLogId;
        this.anomalyLog = anomalyLog;
        this.anomalyLogList = anomalyLogList;
        this.suspectedAnomalyRequest = suspectedAnomalyRequest;
        this.anomalyType = anomalyType;
    }

    public String getAnomalyLogId() {
        return anomalyLogId;
    }

    public Tuple7<String, String, String, String, String, String, String> getAnomalyLog() {
        return anomalyLog;
    }

    public List<Tuple7<String, String, String, String, String, String, String>> getAnomalyLogList() {
        return anomalyLogList;
    }

    public List<Tuple7<String, String, String, String, String, String, String>> getSuspectedAnomalyRequest() {
        return suspectedAnomalyRequest;
    }

    public String getAnomalyType() {
        return anomalyType;
    }

    public void setAnomalyLogId(String anomalyLogId) {
        this.anomalyLogId = anomalyLogId;
    }

    public void setAnomalyLog(Tuple7<String, String, String, String, String, String, String> anomalyLog) {
        this.anomalyLog = anomalyLog;
    }

    public void setAnomalyLogList(List<Tuple7<String, String, String, String, String, String, String>> anomalyLogList) {
        this.anomalyLogList = anomalyLogList;
    }

    public void setSuspectedAnomalyRequest(List<Tuple7<String, String, String, String, String, String, String>> suspectedAnomalyRequest) {
        this.suspectedAnomalyRequest = suspectedAnomalyRequest;
    }

    public void setAnomalyType(String anomalyType) {
        this.anomalyType = anomalyType;
    }
}
--------------------------------------------------------------------------------
/src/main/java/modelconstruction/MetricsMonitoring.java:
--------------------------------------------------------------------------------
package modelconstruction;

import TCFGmodel.TCFGUtil;
import org.apache.flink.api.java.utils.ParameterTool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.LinkedList;
import java.util.Queue;

public class MetricsMonitoring extends Thread {

    private boolean flag = true;
    private final TCFGUtil tcfgUtil = new TCFGUtil();
    private final Logger LOG = LoggerFactory.getLogger(MetricsMonitoring.class);
    MovingVariance m = new MovingVariance(10);

    @Override
    public void run() {
        try {
            flag = Boolean.parseBoolean(ParameterTool.fromPropertiesFile("src/main/resources/config.properties").toMap().get("metricsMonitoring"));
            LOG.info("{}", flag);
        } catch (IOException e) {
            e.printStackTrace();
        }
        while (flag) {
            try {
                Thread.sleep(300);
                TransferParamMatrix transferParamMatrix = tcfgUtil.getMatrixFromMemory();
                if (transferParamMatrix == null) continue;
                double norm = transferParamMatrix.getNorm();
                double var = m.add(norm);
                // Once the moving variance of the matrix norm stabilizes, switch from training to detection.
                if (var != 0.0 && var < 0.005) {
                    tcfgUtil.saveTrainingFlag(0);
                    tcfgUtil.saveDetectionFlag(1);
                    System.out.println("Feedback mechanism enabled!");
                    System.out.println("Anomaly detection enabled!");
                    LOG.info("Feedback mechanism enabled!");
                    LOG.info("Anomaly detection enabled!");
                    cancel();
                }
            } catch (NullPointerException ignored) {

            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    public void cancel() {
        flag = false;
    }
}

class MovingVariance {
    int size;
    Queue<Double> q = new LinkedList<>();
    double sum = 0;
    double avg = 0;
    double var = 0;

    MovingVariance(int size) {
        this.size = size;
    }

    // Add a sample and return the variance over the most recent `size` samples.
    double add(double val) {
        if (q.size() >= size) {
            sum -= q.element();
            q.poll();
        }
        q.offer(val);
        sum += val;
        avg = sum / q.size();
        var = 0;
        for (Double x : q) {
            var += Math.pow((x - avg), 2);
        }
        return var / q.size();
    }
}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# LogFlash

## 
Docker Deployment
*Please install docker, docker-compose, and node before performing the following steps.*

1. Download Flink
```bash
cd docker && wget https://mirrors.tuna.tsinghua.edu.cn/apache/flink/flink-1.10.0/flink-1.10.0-bin-scala_2.11.tgz
```

2. Build the logflash image
```bash
./build.sh --job-artifacts LogFlash-1.0-SNAPSHOT.jar --flink-path flink-1.10.0-bin-scala_2.11.tgz
```

3. Build the database image
```bash
docker build -t logsql -f Dockerfile.mysql .
```

4. Build the file2stream image
```bash
docker build -t file2stream -f Dockerfile.file2stream .
```

5. Select the data input mode via sourceName in resources/config.properties
- file: put log files under the resources directory, using the layout resources/<log type name>/raw/<log file>
- socket: put log files under the data directory and write each log path, starting with /data, into logFilePaths

6. Build the web backend server image
- The web server uses a separate configuration file to connect to the database. Before building the image, if you changed the MySQL username or password, edit db-server.json so that its username and password match those in config.properties.

```bash
docker build -t dbserver -f Dockerfile.dbserver .
```

7. Start the web frontend
```bash
# Install dependencies
cd LogFlash_web && npm install --registry=https://registry.npm.taobao.org
# Start the service
nohup npm run dev >web.log 2>&1 &
```

8. Start the containers
```bash
cd .. && FLINK_JOB=Entrance docker-compose up -d
```
## Configuration Parameters
1. Log input parameters
```
sourceName=file or socket  # log input mode (file or socket)
socketHost                 # socket server address
socketPort                 # socket server port
```
2. Log template mining parameters
```
logFormat  # log format; multiple formats can be matched, separated by the @ symbol
Example: logFormat=[][]
```
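
For orientation, a minimal config.properties sketch combining the parameters documented above with the flags referenced elsewhere in this repository (maxOutOfOrderness and timeSlag are read by WatermarkGenerator.java, metricsMonitoring by MetricsMonitoring.java); all values shown are illustrative assumptions, not shipped defaults:

```
sourceName=file
socketHost=file2stream
socketPort=30833
maxOutOfOrderness=5000
timeSlag=3000
metricsMonitoring=true
```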