├── .gitignore ├── screens ├── nifi-core.png └── nifi-edge.png ├── nifi-storm-examples ├── src │ └── main │ │ ├── resources │ │ └── log-level-count.properties │ │ └── java │ │ └── nifi │ │ └── storm │ │ └── examples │ │ ├── data │ │ ├── LogLevels.java │ │ └── DictionaryBuilder.java │ │ ├── bolt │ │ └── LogLevelWindowBolt.java │ │ ├── LogLevelCountProperties.java │ │ └── LogLevelCountTopology.java └── pom.xml ├── nifi-apex-examples ├── src │ ├── site │ │ └── conf │ │ │ └── my-app-conf1.xml │ ├── test │ │ ├── resources │ │ │ └── log4j.properties │ │ └── java │ │ │ └── nifi │ │ │ └── apex │ │ │ └── examples │ │ │ └── logs │ │ │ └── LogLevelApplicationRunner.java │ ├── assemble │ │ └── appPackage.xml │ └── main │ │ ├── resources │ │ └── META-INF │ │ │ └── properties-LogLevelCount.xml │ │ └── java │ │ └── nifi │ │ └── apex │ │ └── examples │ │ └── logs │ │ ├── data │ │ ├── LogLevels.java │ │ └── DictionaryBuilder.java │ │ ├── operators │ │ └── LogLevelWindowCount.java │ │ ├── LogLevelProperties.java │ │ └── LogLevelApplication.java ├── XmlJavadocCommentsExtractor.xsl └── pom.xml ├── nifi-flink-examples ├── src │ └── main │ │ ├── resources │ │ └── window-log-level.properties │ │ └── java │ │ └── nifi │ │ └── flink │ │ └── examples │ │ └── logs │ │ ├── data │ │ ├── LogLevel.java │ │ ├── LogLevels.java │ │ └── DictionaryBuilder.java │ │ ├── functions │ │ ├── LogLevelWindowCounter.java │ │ └── LogLevelFlatMap.java │ │ ├── WindowLogLevelCount.java │ │ └── WindowLogLevelCountProps.java └── pom.xml ├── pom.xml ├── README.md ├── LICENSE └── templates ├── nifi-log-example-core.xml └── nifi-log-example-edge.xml /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | target 3 | *.iml 4 | dependency-reduced-pom.xml 5 | -------------------------------------------------------------------------------- /screens/nifi-core.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bbende/nifi-streaming-examples/HEAD/screens/nifi-core.png -------------------------------------------------------------------------------- /screens/nifi-edge.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bbende/nifi-streaming-examples/HEAD/screens/nifi-edge.png -------------------------------------------------------------------------------- /nifi-storm-examples/src/main/resources/log-level-count.properties: -------------------------------------------------------------------------------- 1 | nifi.url=http://localhost:8080/nifi 2 | nifi.input.port=Logs for Analysis 3 | nifi.input.request.batch=5 4 | nifi.output.port=New Dictionary 5 | 6 | log.level.attribute=log.level 7 | 8 | storm.window.milliseconds=60000 9 | storm.rate.threshold=2 -------------------------------------------------------------------------------- /nifi-apex-examples/src/site/conf/my-app-conf1.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | dt.attr.MASTER_MEMORY_MB 5 | 1024 6 | 7 | 8 | dt.application.MyFirstApplication.operator.randomGenerator.prop.numTuples 9 | 1000 10 | 11 | 12 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=DEBUG,CONSOLE 2 | 3 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 4 | 
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 5 | log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n 6 | 7 | log4j.logger.org=info 8 | #log4j.logger.org.apache.commons.beanutils=warn 9 | log4j.logger.com.datatorrent=debug 10 | -------------------------------------------------------------------------------- /nifi-flink-examples/src/main/resources/window-log-level.properties: -------------------------------------------------------------------------------- 1 | nifi.url=http://localhost:8080/nifi 2 | nifi.input.port=Logs for Analysis 3 | nifi.input.request.batch=5 4 | nifi.output.port=New Dictionary 5 | 6 | log.level.attribute=log.level 7 | 8 | flink.window.milliseconds=60000 9 | flink.slide.milliseconds=30000 10 | flink.rate.threshold=2 11 | 12 | flink.output.path=target/ 13 | flink.output.filename=window-log-levels.txt -------------------------------------------------------------------------------- /nifi-apex-examples/src/test/java/nifi/apex/examples/logs/LogLevelApplicationRunner.java: -------------------------------------------------------------------------------- 1 | package nifi.apex.examples.logs; 2 | 3 | import com.datatorrent.api.LocalMode; 4 | import com.datatorrent.api.StreamingApplication; 5 | import org.apache.hadoop.conf.Configuration; 6 | 7 | /** 8 | * Runs the LogLevelApplication in local mode. 9 | */ 10 | public class LogLevelApplicationRunner { 11 | 12 | public static void main(String[] args) throws Exception { 13 | StreamingApplication app = new LogLevelApplication(); 14 | 15 | Configuration conf = new Configuration(false); 16 | conf.addResource(app.getClass().getResourceAsStream( 17 | "/META-INF/properties-LogLevelCount.xml")); 18 | 19 | LocalMode.runApp(app, conf, 140000); 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /nifi-flink-examples/src/main/java/nifi/flink/examples/logs/data/LogLevel.java: -------------------------------------------------------------------------------- 1 | package nifi.flink.examples.logs.data; 2 | 3 | import org.apache.flink.api.java.tuple.Tuple2; 4 | 5 | import java.io.Serializable; 6 | 7 | /** 8 | * A wrapper for a log level (i.e. warn, error, info) and a count for the level. 9 | */ 10 | public final class LogLevel extends Tuple2 implements Serializable { 11 | 12 | public LogLevel() { 13 | 14 | } 15 | 16 | public LogLevel(String level, int count) { 17 | super(level, count); 18 | } 19 | 20 | public String getLevel() { 21 | return getField(0); 22 | } 23 | 24 | public int getCount() { 25 | return getField(1); 26 | } 27 | 28 | @Override 29 | public String toString() { 30 | return "[ LEVEL = " + getLevel() + ", COUNT = " + getCount() + " ]"; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /nifi-flink-examples/src/main/java/nifi/flink/examples/logs/functions/LogLevelWindowCounter.java: -------------------------------------------------------------------------------- 1 | package nifi.flink.examples.logs.functions; 2 | 3 | import nifi.flink.examples.logs.data.LogLevel; 4 | import nifi.flink.examples.logs.data.LogLevels; 5 | import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction; 6 | import org.apache.flink.streaming.api.windowing.windows.TimeWindow; 7 | import org.apache.flink.util.Collector; 8 | 9 | /** 10 | * Combines incoming LogLevels to a single object with total counts for each level. 
11 | */ 12 | public final class LogLevelWindowCounter implements AllWindowFunction { 13 | 14 | @Override 15 | public void apply(TimeWindow timeWindow, Iterable iterable, Collector collector) 16 | throws Exception { 17 | 18 | LogLevels levels = new LogLevels(); 19 | for (LogLevel logLevel : iterable) { 20 | levels.add(logLevel); 21 | } 22 | 23 | collector.collect(levels); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /nifi-flink-examples/src/main/java/nifi/flink/examples/logs/data/LogLevels.java: -------------------------------------------------------------------------------- 1 | package nifi.flink.examples.logs.data; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | /** 7 | * A map of log levels. 8 | */ 9 | public class LogLevels { 10 | 11 | private Map levels = new HashMap<>(); 12 | 13 | public void add(LogLevel logLevel) { 14 | int count = logLevel.getCount(); 15 | 16 | if (levels.containsKey(logLevel.getLevel())) { 17 | count += levels.get(logLevel.getLevel()); 18 | } 19 | 20 | levels.put(logLevel.getLevel(), count); 21 | } 22 | 23 | public Map getLevels() { 24 | return levels; 25 | } 26 | 27 | @Override 28 | public String toString() { 29 | final StringBuilder builder = new StringBuilder(); 30 | builder.append("LOG LEVEL COUNTS {"); 31 | 32 | for (Map.Entry entry : levels.entrySet()) { 33 | builder.append("\nLEVEL = ") 34 | .append(entry.getKey()) 35 | .append(", COUNT = ") 36 | .append(entry.getValue()); 37 | } 38 | 39 | builder.append("\n}"); 40 | return builder.toString(); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /nifi-storm-examples/src/main/java/nifi/storm/examples/data/LogLevels.java: -------------------------------------------------------------------------------- 1 | package nifi.storm.examples.data; 2 | 3 | import java.io.Serializable; 4 | import java.util.HashMap; 5 | import java.util.Map; 6 | 7 | /** 8 | * A Map of log levels. 9 | * 10 | * @author bbende 11 | */ 12 | public class LogLevels implements Serializable { 13 | 14 | private Map levels = new HashMap<>(); 15 | 16 | public void add(final String level, final int count) { 17 | int totalCount = count; 18 | if (levels.containsKey(level)) { 19 | totalCount += levels.get(level); 20 | } 21 | levels.put(level, totalCount); 22 | } 23 | 24 | public Map getLevels() { 25 | return levels; 26 | } 27 | 28 | @Override 29 | public String toString() { 30 | final StringBuilder builder = new StringBuilder(); 31 | builder.append("LOG LEVEL COUNTS {"); 32 | 33 | for (Map.Entry entry : levels.entrySet()) { 34 | builder.append("\nLEVEL = ") 35 | .append(entry.getKey()) 36 | .append(", COUNT = ") 37 | .append(entry.getValue()); 38 | } 39 | 40 | builder.append("\n}"); 41 | return builder.toString(); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /nifi-flink-examples/src/main/java/nifi/flink/examples/logs/functions/LogLevelFlatMap.java: -------------------------------------------------------------------------------- 1 | package nifi.flink.examples.logs.functions; 2 | 3 | import nifi.flink.examples.logs.data.LogLevel; 4 | import org.apache.flink.api.common.functions.FlatMapFunction; 5 | import org.apache.flink.streaming.connectors.nifi.NiFiDataPacket; 6 | import org.apache.flink.util.Collector; 7 | 8 | import java.util.Map; 9 | 10 | /** 11 | * A FlatMapFunction that maps a NiFiDataPacket to a tuple of (log-level,1). 
12 | */ 13 | public final class LogLevelFlatMap implements FlatMapFunction { 14 | private static final long serialVersionUID = 1L; 15 | 16 | private final String attributeName; 17 | 18 | /** 19 | * @param attributeName the name of an attribute on the NiFiDataPacket containing the log level 20 | */ 21 | public LogLevelFlatMap(final String attributeName) { 22 | this.attributeName = attributeName; 23 | } 24 | 25 | @Override 26 | public void flatMap(NiFiDataPacket niFiDataPacket, Collector collector) 27 | throws Exception { 28 | 29 | Map attributes = niFiDataPacket.getAttributes(); 30 | 31 | if (attributes.containsKey(attributeName)) { 32 | String logLevel = niFiDataPacket.getAttributes().get(attributeName); 33 | collector.collect(new LogLevel(logLevel, 1)); 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | org.apache.nifi 8 | nifi-streaming-examples 9 | pom 10 | 0.0.1-SNAPSHOT 11 | 12 | 13 | 1.0.0 14 | 15 | 16 | 17 | nifi-flink-examples 18 | nifi-apex-examples 19 | nifi-storm-examples 20 | 21 | 22 | 23 | 24 | 25 | org.apache.nifi 26 | nifi-site-to-site-client 27 | ${nifi.version} 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | org.apache.maven.plugins 36 | maven-compiler-plugin 37 | 3.2 38 | 39 | 1.7 40 | 1.7 41 | 42 | 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/assemble/appPackage.xml: -------------------------------------------------------------------------------- 1 | 4 | appPackage 5 | 6 | jar 7 | 8 | false 9 | 10 | 11 | ${basedir}/target/ 12 | /app 13 | 14 | ${project.artifactId}-${project.version}.jar 15 | 16 | 17 | 18 | ${basedir}/target/deps 19 | /lib 20 | 21 | 22 | ${basedir}/src/site/conf 23 | /conf 24 | 25 | *.xml 26 | 27 | 28 | 29 | ${basedir}/src/main/resources/META-INF 30 | /META-INF 31 | 32 | 33 | ${basedir}/src/main/resources/app 34 | /app 35 | 36 | 37 | ${basedir}/src/main/resources/resources 38 | /resources 39 | 40 | 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /nifi-apex-examples/XmlJavadocCommentsExtractor.xsl: -------------------------------------------------------------------------------- 1 | 2 | 17 | 18 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/main/resources/META-INF/properties-LogLevelCount.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 9 | 10 | 11 | nifi.url 12 | http://localhost:8080/nifi 13 | 14 | 15 | 16 | nifi.input.port 17 | Logs for Analysis 18 | 19 | 20 | 21 | nifi.input.request.batch 22 | 5 23 | 24 | 25 | 26 | nifi.output.port 27 | New Dictionary 28 | 29 | 30 | 31 | log.level.attribute 32 | log.level 33 | 34 | 35 | 36 | log.level.rate.threshold 37 | 2 38 | 39 | 40 | 41 | window.size.millis 42 | 60000 43 | 44 | 45 | 46 | application.window.count 47 | 120 48 | 49 | 50 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/main/java/nifi/apex/examples/logs/data/LogLevels.java: -------------------------------------------------------------------------------- 1 | package nifi.apex.examples.logs.data; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | /** 7 | * Wrapper to hold a Map of log levels to 
their counts. 8 | */ 9 | public class LogLevels { 10 | 11 | private Map levels = new HashMap<>(); 12 | 13 | /** 14 | * @param level the level to add 15 | * @param count the count for the level to add 16 | */ 17 | public void add(String level, int count) { 18 | if (levels.containsKey(level)) { 19 | count += levels.get(level); 20 | } 21 | 22 | levels.put(level, count); 23 | } 24 | 25 | public Integer get(String level) { 26 | if (levels.containsKey(level)) { 27 | return levels.get(level); 28 | } else { 29 | return 0; 30 | } 31 | } 32 | 33 | /** 34 | * @return true if there are no level counts, false otherwise 35 | */ 36 | public boolean isEmpty() { 37 | return levels.isEmpty(); 38 | } 39 | 40 | /** 41 | * Clears the current state of the level counts 42 | */ 43 | public void clear() { 44 | levels.clear(); 45 | } 46 | 47 | /** 48 | * @return a deep copy of the current instance 49 | */ 50 | public LogLevels deepCopy() { 51 | LogLevels copy = new LogLevels(); 52 | 53 | for (Map.Entry entry : levels.entrySet()) { 54 | copy.add(entry.getKey(), entry.getValue()); 55 | } 56 | 57 | return copy; 58 | } 59 | 60 | @Override 61 | public String toString() { 62 | final StringBuilder builder = new StringBuilder(); 63 | builder.append("LOG LEVEL COUNTS {"); 64 | 65 | for (Map.Entry entry : levels.entrySet()) { 66 | builder.append("\nLEVEL = ") 67 | .append(entry.getKey()) 68 | .append(", COUNT = ") 69 | .append(entry.getValue()); 70 | } 71 | 72 | builder.append("\n}"); 73 | return builder.toString(); 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /nifi-storm-examples/src/main/java/nifi/storm/examples/bolt/LogLevelWindowBolt.java: -------------------------------------------------------------------------------- 1 | package nifi.storm.examples.bolt; 2 | 3 | import nifi.storm.examples.data.LogLevels; 4 | import org.apache.storm.task.OutputCollector; 5 | import org.apache.storm.task.TopologyContext; 6 | import org.apache.storm.topology.OutputFieldsDeclarer; 7 | import org.apache.storm.topology.base.BaseWindowedBolt; 8 | import org.apache.storm.tuple.Fields; 9 | import org.apache.storm.tuple.Tuple; 10 | import org.apache.storm.tuple.Values; 11 | import org.apache.storm.windowing.TupleWindow; 12 | 13 | import java.util.Map; 14 | 15 | /** 16 | * A windowed bolt that takes incoming tuples containing a log level, and produces map containing 17 | * the total of each level with in the window. 
18 | * 19 | * @author bbende 20 | */ 21 | public class LogLevelWindowBolt extends BaseWindowedBolt { 22 | 23 | private final String logLevelAttribute; 24 | private OutputCollector collector; 25 | 26 | /** 27 | * @param logLevelAttribute the name of the field in the tuple containing the log level 28 | */ 29 | public LogLevelWindowBolt(String logLevelAttribute) { 30 | this.logLevelAttribute = logLevelAttribute; 31 | } 32 | 33 | @Override 34 | public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { 35 | this.collector = collector; 36 | } 37 | 38 | 39 | @Override 40 | public void execute(TupleWindow tupleWindow) { 41 | final LogLevels logLevels = new LogLevels(); 42 | 43 | // for each tuple get the log level and update the count in the levels map 44 | for(Tuple tuple: tupleWindow.get()) { 45 | if (tuple.contains(logLevelAttribute)) { 46 | final String logLevel = tuple.getStringByField(logLevelAttribute); 47 | logLevels.add(logLevel, 1); 48 | } 49 | } 50 | 51 | // emit the whole map of counts as the results 52 | collector.emit(new Values(logLevels)); 53 | 54 | // ack all the tuples, should we do this in the above loop?? 55 | for(Tuple tuple: tupleWindow.get()) { 56 | collector.ack(tuple); 57 | } 58 | } 59 | 60 | @Override 61 | public void declareOutputFields(OutputFieldsDeclarer declarer) { 62 | declarer.declare(new Fields("counts")); 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/main/java/nifi/apex/examples/logs/operators/LogLevelWindowCount.java: -------------------------------------------------------------------------------- 1 | package nifi.apex.examples.logs.operators; 2 | 3 | import com.datatorrent.api.DefaultInputPort; 4 | import com.datatorrent.api.DefaultOutputPort; 5 | import com.datatorrent.common.util.BaseOperator; 6 | import com.datatorrent.contrib.nifi.NiFiDataPacket; 7 | import nifi.apex.examples.logs.data.LogLevels; 8 | import org.slf4j.Logger; 9 | import org.slf4j.LoggerFactory; 10 | 11 | import java.util.Map; 12 | 13 | /** 14 | * Operator that counts the log levels over a given window. The log level is provided through 15 | * an attribute on each NiFiDataPacket, the name of the attribute is provided through the constructor 16 | * of this operator. 17 | */ 18 | public class LogLevelWindowCount extends BaseOperator { 19 | 20 | private static final Logger LOG = LoggerFactory.getLogger(LogLevelWindowCount.class); 21 | 22 | private final String logLevelAttribute; 23 | private final LogLevels logLevels = new LogLevels(); 24 | 25 | private LogLevelWindowCount() { 26 | logLevelAttribute = null; 27 | } 28 | 29 | public LogLevelWindowCount(String logLevelAttribute) { 30 | this.logLevelAttribute = logLevelAttribute; 31 | } 32 | 33 | /** 34 | * Input port on which NiFiDataPackets are received. 
35 | */ 36 | public final transient DefaultInputPort input = new DefaultInputPort() { 37 | 38 | @Override 39 | public void process(NiFiDataPacket niFiDataPacket) { 40 | final Map attrs = niFiDataPacket.getAttributes(); 41 | 42 | // extract the log level and add it to the level counts for the current window 43 | if (attrs != null && attrs.containsKey(logLevelAttribute)) { 44 | logLevels.add(attrs.get(logLevelAttribute), 1); 45 | } 46 | } 47 | }; 48 | 49 | /** 50 | * Output port which emits the map of log levels and their counts for current window 51 | */ 52 | public final transient DefaultOutputPort output = new DefaultOutputPort<>(); 53 | 54 | @Override 55 | public void endWindow() { 56 | LOG.info("LogLevelWindowCount: endWindow"); 57 | 58 | // if no counts for this window then do nothing 59 | if (logLevels.isEmpty()) { 60 | LOG.debug("Levels was empty, returning..."); 61 | return; 62 | } 63 | 64 | // otherwise emit a copy of the counts and clear the counts for next window 65 | output.emit(logLevels.deepCopy()); 66 | logLevels.clear(); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/main/java/nifi/apex/examples/logs/LogLevelProperties.java: -------------------------------------------------------------------------------- 1 | package nifi.apex.examples.logs; 2 | 3 | import org.apache.hadoop.conf.Configuration; 4 | 5 | public class LogLevelProperties { 6 | 7 | public static final String NIFI_URL = "nifi.url"; 8 | public static final String NIFI_INPUT_PORT = "nifi.input.port"; 9 | public static final String NIFI_INPUT_REQUEST_BATCH = "nifi.input.request.batch"; 10 | public static final String NIFI_OUTPUT_PORT = "nifi.output.port"; 11 | public static final String LOG_LEVEL_ATTRIBUTE = "log.level.attribute"; 12 | public static final String LOG_LEVEL_THRESHOLD = "log.level.rate.threshold"; 13 | public static final String WINDOW_SIZE_MILLIS = "window.size.millis"; 14 | public static final String APP_WINDOW_COUNT = "application.window.count"; 15 | 16 | private final String nifiUrl; 17 | private final String nifiInputPort; 18 | private final String nifiOutputPort; 19 | private final int nifiRequestBatch; 20 | private final double logLevelThreshold; 21 | private final String logLevelAttribute; 22 | private final int windowMillis; 23 | private final int appWindowCount; 24 | 25 | public LogLevelProperties(Configuration conf) { 26 | nifiUrl = loadProperty(conf, NIFI_URL); 27 | nifiInputPort = loadProperty(conf, NIFI_INPUT_PORT); 28 | nifiOutputPort = loadProperty(conf, NIFI_OUTPUT_PORT); 29 | 30 | final String tempNiFiRequestBatch = loadProperty(conf, NIFI_INPUT_REQUEST_BATCH); 31 | nifiRequestBatch = Integer.parseInt(tempNiFiRequestBatch); 32 | 33 | final String tempLogLevelThreshold = loadProperty(conf, LOG_LEVEL_THRESHOLD); 34 | logLevelThreshold = Double.parseDouble(tempLogLevelThreshold); 35 | 36 | logLevelAttribute = loadProperty(conf, LOG_LEVEL_ATTRIBUTE); 37 | 38 | final String tempWindowMillis = loadProperty(conf, WINDOW_SIZE_MILLIS); 39 | windowMillis = Integer.parseInt(tempWindowMillis); 40 | 41 | final String tempAppWindowCount = loadProperty(conf, APP_WINDOW_COUNT); 42 | appWindowCount = Integer.parseInt(tempAppWindowCount); 43 | } 44 | 45 | private String loadProperty(final Configuration conf, final String name) { 46 | final String value = conf.get(name); 47 | if (value == null || value.trim().isEmpty()) { 48 | throw new IllegalStateException(name + " is a required property"); 49 | } 50 | return value.trim(); 51 | 
} 52 | 53 | public String getNifiUrl() { 54 | return nifiUrl; 55 | } 56 | 57 | public String getNifiInputPort() { 58 | return nifiInputPort; 59 | } 60 | 61 | public String getNifiOutputPort() { 62 | return nifiOutputPort; 63 | } 64 | 65 | public int getNifiRequestBatch() { 66 | return nifiRequestBatch; 67 | } 68 | 69 | public double getLogLevelThreshold() { 70 | return logLevelThreshold; 71 | } 72 | 73 | public String getLogLevelAttribute() { 74 | return logLevelAttribute; 75 | } 76 | 77 | public int getWindowMillis() { 78 | return windowMillis; 79 | } 80 | 81 | public int getAppWindowCount() { 82 | return appWindowCount; 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /nifi-storm-examples/src/main/java/nifi/storm/examples/LogLevelCountProperties.java: -------------------------------------------------------------------------------- 1 | package nifi.storm.examples; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Properties; 6 | 7 | /** 8 | * @author bbende. 9 | */ 10 | public class LogLevelCountProperties { 11 | 12 | static final String NIFI_URL = "nifi.url"; 13 | static final String NIFI_INPUT_PORT = "nifi.input.port"; 14 | static final String NIFI_REQUEST_BATCH = "nifi.input.request.batch"; 15 | static final String NIFI_OUTPUT_PORT = "nifi.output.port"; 16 | 17 | static final String STORM_WINDOW_MILLIS = "storm.window.milliseconds"; 18 | static final String STORM_RATE_THRESHOLD = "storm.rate.threshold"; 19 | 20 | static final String LOG_LEVEL_ATTRIBUTE = "log.level.attribute"; 21 | 22 | private final String nifiUrl; 23 | private final String nifiInputPort; 24 | private final String nifiOutputPort; 25 | private final int nifiRequestBatch; 26 | private final int stormWindowMillis; 27 | private final double stormRateThreshold; 28 | private final String logLevelAttribute; 29 | 30 | public LogLevelCountProperties(final String propertiesFile) throws IOException { 31 | final InputStream in = this.getClass().getClassLoader().getResourceAsStream(propertiesFile); 32 | 33 | final Properties properties = new Properties(); 34 | properties.load(in); 35 | 36 | nifiUrl = loadProperty(properties, NIFI_URL); 37 | nifiInputPort = loadProperty(properties, NIFI_INPUT_PORT); 38 | nifiOutputPort = loadProperty(properties, NIFI_OUTPUT_PORT); 39 | 40 | final String tempNiFiRequestBatch = loadProperty(properties, NIFI_REQUEST_BATCH); 41 | nifiRequestBatch = Integer.parseInt(tempNiFiRequestBatch); 42 | 43 | final String tempStormWindowSize = loadProperty(properties, STORM_WINDOW_MILLIS); 44 | stormWindowMillis = Integer.parseInt(tempStormWindowSize); 45 | 46 | final String tempStormRateThreshold = loadProperty(properties, STORM_RATE_THRESHOLD); 47 | stormRateThreshold = Double.parseDouble(tempStormRateThreshold); 48 | 49 | logLevelAttribute = loadProperty(properties, LOG_LEVEL_ATTRIBUTE); 50 | } 51 | 52 | private String loadProperty(final Properties properties, final String name) { 53 | final String value = properties.getProperty(name); 54 | if (value == null || value.trim().isEmpty()) { 55 | throw new IllegalStateException(name + " is a required property"); 56 | } 57 | return value.trim(); 58 | } 59 | 60 | public String getNifiUrl() { 61 | return nifiUrl; 62 | } 63 | 64 | public String getNifiInputPort() { 65 | return nifiInputPort; 66 | } 67 | 68 | public String getNifiOutputPort() { 69 | return nifiOutputPort; 70 | } 71 | 72 | public int getNifiRequestBatch() { 73 | return nifiRequestBatch; 74 | } 75 | 76 | public int 
getStormWindowMillis() { 77 | return stormWindowMillis; 78 | } 79 | 80 | public double getStormRateThreshold() { 81 | return stormRateThreshold; 82 | } 83 | 84 | public String getLogLevelAttribute() { 85 | return logLevelAttribute; 86 | } 87 | 88 | } 89 | -------------------------------------------------------------------------------- /nifi-storm-examples/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | nifi-streaming-examples 7 | org.apache.nifi 8 | 0.0.1-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | nifi-storm-examples 13 | 14 | 15 | 1.0.2 16 | 17 | 18 | 19 | 20 | org.apache.nifi 21 | nifi-storm-spout 22 | 1.0.0 23 | 24 | 25 | org.apache.storm 26 | storm-core 27 | ${storm.version} 28 | 29 | 30 | 31 | 32 | 33 | LogLevelCountTopology 34 | 35 | 36 | 37 | org.apache.maven.plugins 38 | maven-shade-plugin 39 | 2.2 40 | 41 | 42 | package 43 | 44 | shade 45 | 46 | 47 | 48 | 49 | 50 | nifi.storm.examples.LogLevelCountTopology 51 | 52 | 53 | 54 | 55 | 56 | *:* 57 | 58 | META-INF/*.SF 59 | META-INF/*.DSA 60 | META-INF/*.RSA 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/main/java/nifi/apex/examples/logs/data/DictionaryBuilder.java: -------------------------------------------------------------------------------- 1 | package nifi.apex.examples.logs.data; 2 | 3 | import com.datatorrent.contrib.nifi.NiFiDataPacket; 4 | import com.datatorrent.contrib.nifi.NiFiDataPacketBuilder; 5 | import com.datatorrent.contrib.nifi.StandardNiFiDataPacket; 6 | 7 | import java.nio.charset.StandardCharsets; 8 | import java.util.HashMap; 9 | import java.util.Map; 10 | 11 | /** 12 | * Produces a dictionary file for NiFi containing the log levels that should be 13 | * collected based on the rate of error and warn messages coming in. 
14 | */ 15 | public class DictionaryBuilder implements NiFiDataPacketBuilder { 16 | 17 | public static final String ERROR = "ERROR"; 18 | public static final String WARN = "WARN"; 19 | public static final String INFO = "INFO"; 20 | public static final String DEBUG = "DEBUG"; 21 | 22 | public static final String ERROR_WARN_TOTAL_ATTR = "error.warn.total"; 23 | public static final String ERROR_WARN_RATE_ATTR = "error.warn.rate"; 24 | public static final String WINDOW_MILLIS_ATTR = "window.size.millis"; 25 | 26 | private final int windowSizeMillis; 27 | private final double minRatePerSecond; 28 | 29 | private DictionaryBuilder() { 30 | this.windowSizeMillis = 0; 31 | this.minRatePerSecond = 0; 32 | } 33 | 34 | /** 35 | * @param windowSizeMillis the windowSize in milliseconds that the LogLevels were computed over 36 | * 37 | * @param minRatePerSecond the rate of error and warn messages over the window that indicates not to 38 | * collect other levels, if the actual rate is less we can collect more 39 | */ 40 | public DictionaryBuilder(final int windowSizeMillis, final double minRatePerSecond) { 41 | this.windowSizeMillis = windowSizeMillis; 42 | this.minRatePerSecond = minRatePerSecond; 43 | } 44 | 45 | @Override 46 | public NiFiDataPacket createNiFiDataPacket(LogLevels logLevels) { 47 | // get the total number of error and warn messages 48 | int totalWarnError = 0; 49 | totalWarnError += logLevels.get(ERROR); 50 | totalWarnError += logLevels.get(WARN); 51 | 52 | // calculate the number of ERROR/WARN messages per second 53 | double windowSizeSeconds = windowSizeMillis / 1000; 54 | double actualRate = ((double)totalWarnError) / windowSizeSeconds; 55 | 56 | // always collect ERROR/WARN messages 57 | StringBuilder builder = new StringBuilder(); 58 | builder.append(ERROR).append("\n"); 59 | builder.append(WARN).append("\n"); 60 | 61 | // only collect INFO and DEBUG if ERROR/WARN is less than minimum rate 62 | if (actualRate < minRatePerSecond) { 63 | builder.append(INFO).append("\n"); 64 | builder.append(DEBUG).append("\n"); 65 | } 66 | 67 | // pass the rate, total, and window size as attributes 68 | Map attrs = new HashMap<>(); 69 | attrs.put(ERROR_WARN_RATE_ATTR, String.valueOf(actualRate)); 70 | attrs.put(ERROR_WARN_TOTAL_ATTR, String.valueOf(totalWarnError)); 71 | attrs.put(WINDOW_MILLIS_ATTR, String.valueOf(windowSizeMillis)); 72 | 73 | byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); 74 | return new StandardNiFiDataPacket(content, attrs); 75 | } 76 | 77 | } 78 | -------------------------------------------------------------------------------- /nifi-flink-examples/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | nifi-streaming-examples 7 | org.apache.nifi 8 | 0.0.1-SNAPSHOT 9 | 10 | 4.0.0 11 | 12 | nifi-flink-examples 13 | 14 | 15 | 1.1.2 16 | 17 | 18 | 19 | 20 | org.apache.flink 21 | flink-connector-nifi_2.10 22 | ${flink.version} 23 | 24 | 25 | org.apache.flink 26 | flink-streaming-java_2.10 27 | ${flink.version} 28 | 29 | 30 | 31 | 32 | 33 | WindowLogLevelCount 34 | 35 | 36 | 37 | org.apache.maven.plugins 38 | maven-shade-plugin 39 | 2.2 40 | 41 | 42 | package 43 | 44 | shade 45 | 46 | 47 | 48 | 49 | 50 | nifi.flink.examples.logs.WindowLogLevelCount 51 | 52 | 53 | 54 | 55 | 56 | *:* 57 | 58 | META-INF/*.SF 59 | META-INF/*.DSA 60 | META-INF/*.RSA 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- 
/README.md: -------------------------------------------------------------------------------- 1 | # Apache NiFi Streaming Examples 2 | Collection of examples integrating NiFi with stream processing frameworks. 3 | 4 | ## Initial Setup 5 | 6 | * Download the latest [Apache NiFi release](https://nifi.apache.org/download.html) 7 | 8 | * Extract the tar and create two instances of NiFi: 9 |
  
10 |     tar xzvf nifi-1.0.0-bin.tar.gz
11 |     mv nifi-1.0.0 nifi-edge
12 |     tar xzvf nifi-1.0.0-bin.tar.gz
13 |     mv nifi-1.0.0 nifi-core
14 |   
15 | * Configure the edge instance by editing nifi-edge/conf/nifi.properties and setting the following properties: 16 |
  
17 |     nifi.remote.input.socket.port=7088
18 |     nifi.remote.input.secure=false
19 |     nifi.web.http.port=7080
20 |   
21 | * Configure the core instance by editing nifi-core/conf/nifi.properties and setting the following properties: 22 |
 
23 |     nifi.remote.input.socket.port=8088
24 |     nifi.remote.input.secure=false
25 |     nifi.web.http.port=8080
26 |   
27 | * Start both instances 28 |
 
29 |     ./nifi-core/bin/nifi.sh start
30 |     ./nifi-edge/bin/nifi.sh start
31 |   
32 | * Open the UI for both instances in a browser 33 |
 
34 |     http://localhost:7080/nifi/
35 |     http://localhost:8080/nifi/
36 |   
37 | * Set up the initial dictionary files 38 |

39 |     mkdir nifi-edge/data
40 |     mkdir nifi-edge/data/dictionary
41 |     mkdir nifi-core/data
42 |     mkdir nifi-core/data/dictionary
43 |   
44 | * In each of the above dictionary directories, create a file called levels.txt with the content: 45 |

46 |     ERROR
47 |     WARN
48 | 
49 | 50 | * Import nifi-streaming-examples/templates/nifi-log-example-edge.xml into the edge instance (http://localhost:7080/nifi) 51 | 52 | * Import nifi-streaming-examples/templates/nifi-log-example-core.xml into the core instance (http://localhost:8080/nifi) 53 | 54 | * Start everything on the core instance (http://localhost:8080/nifi) 55 | ![Image](https://github.com/bbende/nifi-streaming-examples/blob/master/screens/nifi-core.png?raw=true) 56 | 57 | * To start sending logs, start everything on the edge instance (http://localhost:7080/nifi) EXCEPT the TailFile processor; the "Generate Test Logs" process group will send fake log messages 58 | ![Image](https://github.com/bbende/nifi-streaming-examples/blob/master/screens/nifi-edge.png?raw=true) 59 | 60 | * To tail a real file, stop the "Generate Test Logs" process group, configure TailFile to point to your log file of choice, and start the TailFile processor 61 | 62 | ## Flink - WindowLogLevelCount - Setup 63 | * For local testing, run a standalone Flink streaming job 64 |

65 |   cd nifi-flink-examples
66 |   mvn clean package -PWindowLogLevelCount
67 |   java -jar target/nifi-flink-examples-0.0.1-SNAPSHOT.jar
68 | 
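For orientation, here is a condensed sketch of what that job does, assembled from the classes shipped in this module (WindowLogLevelCount, LogLevelFlatMap, LogLevelWindowCounter, DictionaryBuilder) with the defaults from window-log-level.properties inlined; the class name WindowLogLevelCountSketch is only a placeholder:

```java
// A minimal sketch of the shipped WindowLogLevelCount pipeline, with the defaults
// from window-log-level.properties inlined (the real class loads them at runtime).
import java.util.concurrent.TimeUnit;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.nifi.NiFiDataPacket;
import org.apache.flink.streaming.connectors.nifi.NiFiSink;
import org.apache.flink.streaming.connectors.nifi.NiFiSource;
import org.apache.nifi.remote.client.SiteToSiteClient;
import org.apache.nifi.remote.client.SiteToSiteClientConfig;

import nifi.flink.examples.logs.data.DictionaryBuilder;
import nifi.flink.examples.logs.data.LogLevels;
import nifi.flink.examples.logs.functions.LogLevelFlatMap;
import nifi.flink.examples.logs.functions.LogLevelWindowCounter;

public class WindowLogLevelCountSketch {

    public static void main(String[] args) throws Exception {
        final String nifiUrl = "http://localhost:8080/nifi"; // the core instance
        final int windowMillis = 60000;
        final double rateThreshold = 2.0;

        // Site-to-site configs for pulling logs and pushing the new dictionary back
        SiteToSiteClientConfig sourceConfig = new SiteToSiteClient.Builder()
                .url(nifiUrl).portName("Logs for Analysis").requestBatchCount(5).buildConfig();
        SiteToSiteClientConfig sinkConfig = new SiteToSiteClient.Builder()
                .url(nifiUrl).portName("New Dictionary").buildConfig();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // NiFi -> (level, 1) tuples -> totals per window -> dictionary back to NiFi
        DataStream<NiFiDataPacket> source = env.addSource(new NiFiSource(sourceConfig));
        DataStream<LogLevels> counts = source
                .flatMap(new LogLevelFlatMap("log.level"))
                .timeWindowAll(Time.of(windowMillis, TimeUnit.MILLISECONDS))
                .apply(new LogLevelWindowCounter());
        counts.addSink(new NiFiSink<>(sinkConfig, new DictionaryBuilder(windowMillis, rateThreshold)));

        env.execute("WindowLogLevelCount");
    }
}
```

The full class additionally accepts a properties file name as its single program argument, so these values can be overridden without rebuilding.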
69 | 70 | ## Apex - LogLevelApplicationRunner - Setup 71 | 72 | * For local testing, run LogLevelApplicationRunner from your favorite IDE: 73 |

74 |   nifi-apex-examples/src/test/java/nifi/apex/examples/logs/LogLevelApplicationRunner.java
75 | 
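If you prefer not to use an IDE, the runner boils down to a few lines of Apex's local mode API. The sketch below mirrors the shipped LogLevelApplicationRunner (only the class name LocalModeSketch is a placeholder): it loads the operator properties from META-INF/properties-LogLevelCount.xml and runs the DAG in-process for 140 seconds:

```java
import com.datatorrent.api.LocalMode;
import com.datatorrent.api.StreamingApplication;
import org.apache.hadoop.conf.Configuration;

import nifi.apex.examples.logs.LogLevelApplication;

public class LocalModeSketch {

    public static void main(String[] args) throws Exception {
        // The application wires nifi-in -> count -> nifi-out (see LogLevelApplication)
        StreamingApplication app = new LogLevelApplication();

        // NiFi URL, port names, window sizes, etc. come from the packaged XML properties
        Configuration conf = new Configuration(false);
        conf.addResource(app.getClass().getResourceAsStream("/META-INF/properties-LogLevelCount.xml"));

        // Run the DAG in-process for 140,000 ms, then shut down
        LocalMode.runApp(app, conf, 140000);
    }
}
```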
76 | 77 | ## Storm - LogLevelCountTopology - Setup 78 | 79 | * For local testing, run a standalone local Storm topology 80 |

81 |   cd nifi-storm-examples
82 |   mvn clean package -PLogLevelCountTopology
83 |   java -jar target/nifi-storm-examples-0.0.1-SNAPSHOT.jar
84 | 
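As with the Flink example, the topology itself is short. The sketch below condenses the shipped LogLevelCountTopology, with the defaults from log-level-count.properties inlined (LogLevelCountTopologySketch is only a placeholder name):

```java
import java.util.Collections;
import java.util.concurrent.TimeUnit;

import org.apache.nifi.remote.client.SiteToSiteClient;
import org.apache.nifi.storm.NiFiBolt;
import org.apache.nifi.storm.NiFiSpout;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseWindowedBolt;
import org.apache.storm.utils.Utils;

import nifi.storm.examples.bolt.LogLevelWindowBolt;
import nifi.storm.examples.data.DictionaryBuilder;

public class LogLevelCountTopologySketch {

    public static void main(String[] args) throws Exception {
        final String nifiUrl = "http://localhost:8080/nifi"; // the core instance
        final int windowMillis = 60000;
        final double rateThreshold = 2.0;

        // Spout pulling flow files from the "Logs for Analysis" output port,
        // copying the log.level attribute into a tuple field
        NiFiSpout spout = new NiFiSpout(
                new SiteToSiteClient.Builder().url(nifiUrl).portName("Logs for Analysis")
                        .requestBatchCount(5).buildConfig(),
                Collections.singletonList("log.level"));

        // Tumbling-window bolt that totals tuples per log level
        BaseWindowedBolt countBolt = new LogLevelWindowBolt("log.level")
                .withTumblingWindow(new BaseWindowedBolt.Duration(windowMillis, TimeUnit.MILLISECONDS));

        // Bolt sending the new dictionary back to NiFi's "New Dictionary" input port
        NiFiBolt nifiBolt = new NiFiBolt(
                new SiteToSiteClient.Builder().url(nifiUrl).portName("New Dictionary").buildConfig(),
                new DictionaryBuilder(windowMillis, rateThreshold), 10).withBatchSize(1);

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("nifiInput", spout);
        builder.setBolt("logLevels", countBolt).shuffleGrouping("nifiInput");
        builder.setBolt("nifiOutput", nifiBolt).shuffleGrouping("logLevels");

        // Message timeout must exceed the window so windowed tuples are not replayed
        Config conf = new Config();
        conf.setMessageTimeoutSecs((windowMillis / 1000) * 2);

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("log-levels", conf, builder.createTopology());
        Utils.sleep(130000); // let it run locally for ~2 minutes
        cluster.killTopology("log-levels");
        cluster.shutdown();
    }
}
```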
85 | -------------------------------------------------------------------------------- /nifi-flink-examples/src/main/java/nifi/flink/examples/logs/data/DictionaryBuilder.java: -------------------------------------------------------------------------------- 1 | package nifi.flink.examples.logs.data; 2 | 3 | import org.apache.flink.api.common.functions.RuntimeContext; 4 | import org.apache.flink.streaming.connectors.nifi.NiFiDataPacket; 5 | import org.apache.flink.streaming.connectors.nifi.NiFiDataPacketBuilder; 6 | import org.apache.flink.streaming.connectors.nifi.StandardNiFiDataPacket; 7 | 8 | import java.nio.charset.StandardCharsets; 9 | import java.util.HashMap; 10 | import java.util.Map; 11 | 12 | /** 13 | * Produces a dictionary file for NiFi containing the log levels that should be 14 | * collected based on the rate of error and warn messages coming in. 15 | */ 16 | public class DictionaryBuilder implements NiFiDataPacketBuilder { 17 | 18 | public static final String ERROR = "ERROR"; 19 | public static final String WARN = "WARN"; 20 | public static final String INFO = "INFO"; 21 | public static final String DEBUG = "DEBUG"; 22 | 23 | public static final String ERROR_WARN_TOTAL_ATTR = "error.warn.total"; 24 | public static final String ERROR_WARN_RATE_ATTR = "error.warn.rate"; 25 | public static final String WINDOW_MILLIS_ATTR = "window.size.millis"; 26 | 27 | private final int windowSizeMillis; 28 | private final double minRatePerSecond; 29 | 30 | /** 31 | * @param windowSizeMillis the windowSize in milliseconds that the LogLevels were computed over 32 | * 33 | * @param minRatePerSecond the rate of error and warn messages over the window that indicates not to 34 | * collect other levels, if the actual rate is less we can collect more 35 | */ 36 | public DictionaryBuilder(final int windowSizeMillis, final double minRatePerSecond) { 37 | this.windowSizeMillis = windowSizeMillis; 38 | this.minRatePerSecond = minRatePerSecond; 39 | } 40 | 41 | @Override 42 | public NiFiDataPacket createNiFiDataPacket(LogLevels logLevels, RuntimeContext runtimeContext) { 43 | Map counts = logLevels.getLevels(); 44 | 45 | // get the total number of error and warn messages 46 | int totalWarnError = 0; 47 | if (counts.containsKey(ERROR)) { 48 | totalWarnError += counts.get(ERROR); 49 | } 50 | if (counts.containsKey(WARN)) { 51 | totalWarnError += counts.get(WARN); 52 | } 53 | 54 | // calculate the number of ERROR/WARN messages per second 55 | double windowSizeSeconds = windowSizeMillis / 1000; 56 | double actualRate = ((double)totalWarnError) / windowSizeSeconds; 57 | 58 | // always collect ERROR/WARN messages 59 | StringBuilder builder = new StringBuilder(); 60 | builder.append(ERROR).append("\n"); 61 | builder.append(WARN).append("\n"); 62 | 63 | // only collect INFO and DEBUG if ERROR/WARN is less than minimum rate 64 | if (actualRate < minRatePerSecond) { 65 | builder.append(INFO).append("\n"); 66 | builder.append(DEBUG).append("\n"); 67 | } 68 | 69 | // pass the rate, total, and window size as attributes 70 | Map attrs = new HashMap<>(); 71 | attrs.put(ERROR_WARN_RATE_ATTR, String.valueOf(actualRate)); 72 | attrs.put(ERROR_WARN_TOTAL_ATTR, String.valueOf(totalWarnError)); 73 | attrs.put(WINDOW_MILLIS_ATTR, String.valueOf(windowSizeMillis)); 74 | 75 | byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); 76 | return new StandardNiFiDataPacket(content, attrs); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- 
/nifi-storm-examples/src/main/java/nifi/storm/examples/data/DictionaryBuilder.java: -------------------------------------------------------------------------------- 1 | package nifi.storm.examples.data; 2 | 3 | import org.apache.nifi.storm.NiFiDataPacket; 4 | import org.apache.nifi.storm.NiFiDataPacketBuilder; 5 | import org.apache.nifi.storm.StandardNiFiDataPacket; 6 | import org.apache.storm.tuple.Tuple; 7 | 8 | import java.io.Serializable; 9 | import java.nio.charset.StandardCharsets; 10 | import java.util.HashMap; 11 | import java.util.Map; 12 | 13 | /** 14 | * Produces a dictionary file for NiFi containing the log levels that should be 15 | * collected based on the rate of error and warn messages coming in. 16 | * 17 | * @author bbende 18 | */ 19 | public class DictionaryBuilder implements NiFiDataPacketBuilder, Serializable { 20 | 21 | public static final String ERROR = "ERROR"; 22 | public static final String WARN = "WARN"; 23 | public static final String INFO = "INFO"; 24 | public static final String DEBUG = "DEBUG"; 25 | 26 | public static final String ERROR_WARN_TOTAL_ATTR = "error.warn.total"; 27 | public static final String ERROR_WARN_RATE_ATTR = "error.warn.rate"; 28 | public static final String WINDOW_MILLIS_ATTR = "window.size.millis"; 29 | 30 | private final int windowSizeMillis; 31 | private final double minRatePerSecond; 32 | 33 | /** 34 | * @param windowSizeMillis the windowSize in milliseconds that the LogLevels were computed over 35 | * 36 | * @param minRatePerSecond the rate of error and warn messages over the window that indicates not to 37 | * collect other levels, if the actual rate is less we can collect more 38 | */ 39 | public DictionaryBuilder(final int windowSizeMillis, final double minRatePerSecond) { 40 | this.windowSizeMillis = windowSizeMillis; 41 | this.minRatePerSecond = minRatePerSecond; 42 | } 43 | 44 | @Override 45 | public NiFiDataPacket createNiFiDataPacket(Tuple tuple) { 46 | final LogLevels logLevels = (LogLevels) tuple.getValue(0); 47 | 48 | Map counts = logLevels.getLevels(); 49 | 50 | // get the total number of error and warn messages 51 | int totalWarnError = 0; 52 | if (counts.containsKey(ERROR)) { 53 | totalWarnError += counts.get(ERROR); 54 | } 55 | if (counts.containsKey(WARN)) { 56 | totalWarnError += counts.get(WARN); 57 | } 58 | 59 | // calculate the number of ERROR/WARN messages per second 60 | double windowSizeSeconds = windowSizeMillis / 1000; 61 | double actualRate = ((double)totalWarnError) / windowSizeSeconds; 62 | 63 | // always collect ERROR/WARN messages 64 | StringBuilder builder = new StringBuilder(); 65 | builder.append(ERROR).append("\n"); 66 | builder.append(WARN).append("\n"); 67 | 68 | // only collect INFO and DEBUG if ERROR/WARN is less than minimum rate 69 | if (actualRate < minRatePerSecond) { 70 | builder.append(INFO).append("\n"); 71 | builder.append(DEBUG).append("\n"); 72 | } 73 | 74 | // pass the rate, total, and window size as attributes 75 | Map attrs = new HashMap<>(); 76 | attrs.put(ERROR_WARN_RATE_ATTR, String.valueOf(actualRate)); 77 | attrs.put(ERROR_WARN_TOTAL_ATTR, String.valueOf(totalWarnError)); 78 | attrs.put(WINDOW_MILLIS_ATTR, String.valueOf(windowSizeMillis)); 79 | 80 | byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); 81 | return new StandardNiFiDataPacket(content, attrs); 82 | } 83 | 84 | } 85 | -------------------------------------------------------------------------------- /nifi-apex-examples/src/main/java/nifi/apex/examples/logs/LogLevelApplication.java: 
-------------------------------------------------------------------------------- 1 | package nifi.apex.examples.logs; 2 | 3 | import com.datatorrent.api.Context; 4 | import com.datatorrent.api.DAG; 5 | import com.datatorrent.api.StreamingApplication; 6 | import com.datatorrent.api.annotation.ApplicationAnnotation; 7 | import com.datatorrent.contrib.nifi.NiFiDataPacketBuilder; 8 | import com.datatorrent.contrib.nifi.NiFiSinglePortInputOperator; 9 | import com.datatorrent.contrib.nifi.NiFiSinglePortOutputOperator; 10 | import nifi.apex.examples.logs.data.DictionaryBuilder; 11 | import nifi.apex.examples.logs.data.LogLevels; 12 | import nifi.apex.examples.logs.operators.LogLevelWindowCount; 13 | import org.apache.apex.malhar.lib.wal.WindowDataManager; 14 | import org.apache.hadoop.conf.Configuration; 15 | import org.apache.nifi.remote.client.SiteToSiteClient; 16 | 17 | /** 18 | * Application that pulls logs from NiFi and calculates the rate of ERROR/WARN messages 19 | * over a time window and sends a new set of levels to collect based on the rate. 20 | */ 21 | @ApplicationAnnotation(name="LogLevelCount") 22 | public class LogLevelApplication implements StreamingApplication { 23 | 24 | @Override 25 | public void populateDAG(DAG dag, Configuration configuration) { 26 | LogLevelProperties props = new LogLevelProperties(configuration); 27 | 28 | //dag.setAttribute(Context.DAGContext.STREAMING_WINDOW_SIZE_MILLIS, props.getWindowMillis()); 29 | 30 | // create the operator to receive data from NiFi 31 | WindowDataManager inManager = new WindowDataManager.NoopWindowDataManager(); 32 | NiFiSinglePortInputOperator nifiInput = getNiFiInput(dag, props, inManager); 33 | 34 | // create the operator to count log levels over a window 35 | String attributName = props.getLogLevelAttribute(); 36 | LogLevelWindowCount count = dag.addOperator("count", new LogLevelWindowCount(attributName)); 37 | dag.setAttribute(count, Context.OperatorContext.APPLICATION_WINDOW_COUNT, props.getAppWindowCount()); 38 | 39 | // create the operator to send data back to NiFi 40 | WindowDataManager outManager = new WindowDataManager.NoopWindowDataManager(); 41 | NiFiSinglePortOutputOperator nifiOutput = getNiFiOutput(dag, props, outManager); 42 | 43 | // configure the dag to get nifi-in -> count -> nifi-out 44 | dag.addStream("nifi-in-count", nifiInput.outputPort, count.input); 45 | dag.addStream("count-nifi-out", count.output, nifiOutput.inputPort); 46 | } 47 | 48 | private NiFiSinglePortInputOperator getNiFiInput(DAG dag, LogLevelProperties props, WindowDataManager windowDataManager) { 49 | final SiteToSiteClient.Builder inputConfig = new SiteToSiteClient.Builder() 50 | .url(props.getNifiUrl()) 51 | .portName(props.getNifiInputPort()) 52 | .requestBatchCount(props.getNifiRequestBatch()); 53 | 54 | return dag.addOperator("nifi-in", new NiFiSinglePortInputOperator(inputConfig, windowDataManager)); 55 | } 56 | 57 | private NiFiSinglePortOutputOperator getNiFiOutput(DAG dag, LogLevelProperties props, WindowDataManager windowDataManager) { 58 | final SiteToSiteClient.Builder outputConfig = new SiteToSiteClient.Builder() 59 | .url(props.getNifiUrl()) 60 | .portName(props.getNifiOutputPort()); 61 | 62 | final int batchSize = 1; 63 | final NiFiDataPacketBuilder dataPacketBuilder = new DictionaryBuilder( 64 | props.getWindowMillis(), props.getLogLevelThreshold()); 65 | 66 | return dag.addOperator("nifi-out", new NiFiSinglePortOutputOperator( 67 | outputConfig, dataPacketBuilder, windowDataManager ,batchSize)); 68 | } 69 | 70 | } 71 | 
-------------------------------------------------------------------------------- /nifi-flink-examples/src/main/java/nifi/flink/examples/logs/WindowLogLevelCount.java: -------------------------------------------------------------------------------- 1 | package nifi.flink.examples.logs; 2 | 3 | import nifi.flink.examples.logs.data.DictionaryBuilder; 4 | import nifi.flink.examples.logs.data.LogLevels; 5 | import nifi.flink.examples.logs.functions.LogLevelFlatMap; 6 | import nifi.flink.examples.logs.functions.LogLevelWindowCounter; 7 | import org.apache.flink.streaming.api.datastream.DataStream; 8 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 9 | import org.apache.flink.streaming.api.functions.source.SourceFunction; 10 | import org.apache.flink.streaming.api.windowing.time.Time; 11 | import org.apache.flink.streaming.connectors.nifi.NiFiDataPacket; 12 | import org.apache.flink.streaming.connectors.nifi.NiFiDataPacketBuilder; 13 | import org.apache.flink.streaming.connectors.nifi.NiFiSink; 14 | import org.apache.flink.streaming.connectors.nifi.NiFiSource; 15 | import org.apache.nifi.remote.client.SiteToSiteClient; 16 | import org.apache.nifi.remote.client.SiteToSiteClientConfig; 17 | 18 | import java.util.concurrent.TimeUnit; 19 | 20 | /** 21 | * Flink Streaming application that receives log data from NiFi and groups the logs by their 22 | * level, counting the number of logs per level over window. 23 | * 24 | * @author bbende 25 | */ 26 | public class WindowLogLevelCount { 27 | 28 | public static final String DEFAULT_PROPERTIES_FILE = "window-log-level.properties"; 29 | 30 | public static void main(String[] args) throws Exception { 31 | String propertiesFile = DEFAULT_PROPERTIES_FILE; 32 | if (args != null && args.length == 1 && args[0] != null) { 33 | propertiesFile = args[0]; 34 | } 35 | 36 | WindowLogLevelCountProps props = new WindowLogLevelCountProps(propertiesFile); 37 | 38 | // Set up the execution environment 39 | StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 40 | 41 | // Configure the SiteToSiteClient 42 | SiteToSiteClientConfig clientConfig = getSourceConfig(props); 43 | 44 | // Create our data stream with a NiFiSource 45 | SourceFunction nifiSource = new NiFiSource(clientConfig); 46 | DataStream streamSource = env.addSource(nifiSource); 47 | 48 | int windowSize = props.getFlinkWindowMillis(); 49 | LogLevelFlatMap logLevelFlatMap = new LogLevelFlatMap(props.getLogLevelAttribute()); 50 | 51 | // Count the occurrences of each log level over a window 52 | DataStream counts = 53 | streamSource.flatMap(logLevelFlatMap) 54 | .timeWindowAll(Time.of(windowSize, TimeUnit.MILLISECONDS)) 55 | .apply(new LogLevelWindowCounter()); 56 | 57 | // Add the sink to send the dictionary back to NiFi 58 | double rateThreshold = props.getFlinkRateThreshold(); 59 | SiteToSiteClientConfig sinkConfig = getSinkConfig(props); 60 | NiFiDataPacketBuilder builder = new DictionaryBuilder(windowSize, rateThreshold); 61 | counts.addSink(new NiFiSink<>(sinkConfig, builder)); 62 | 63 | // execute program 64 | env.execute("WindowLogLevelCount"); 65 | } 66 | 67 | private static SiteToSiteClientConfig getSourceConfig(WindowLogLevelCountProps props) { 68 | return new SiteToSiteClient.Builder() 69 | .url(props.getNifiUrl()) 70 | .portName(props.getNifiInputPort()) 71 | .requestBatchCount(props.getNifiRequestBatch()) 72 | .buildConfig(); 73 | } 74 | 75 | private static SiteToSiteClientConfig getSinkConfig(WindowLogLevelCountProps props) { 76 | return 
new SiteToSiteClient.Builder() 77 | .url(props.getNifiUrl()) 78 | .portName(props.getNifiOutputPort()) 79 | .buildConfig(); 80 | } 81 | 82 | } 83 | -------------------------------------------------------------------------------- /nifi-storm-examples/src/main/java/nifi/storm/examples/LogLevelCountTopology.java: -------------------------------------------------------------------------------- 1 | package nifi.storm.examples; 2 | 3 | import nifi.storm.examples.bolt.LogLevelWindowBolt; 4 | import nifi.storm.examples.data.DictionaryBuilder; 5 | import org.apache.nifi.remote.client.SiteToSiteClient; 6 | import org.apache.nifi.remote.client.SiteToSiteClientConfig; 7 | import org.apache.nifi.storm.NiFiBolt; 8 | import org.apache.nifi.storm.NiFiDataPacketBuilder; 9 | import org.apache.nifi.storm.NiFiSpout; 10 | import org.apache.storm.Config; 11 | import org.apache.storm.LocalCluster; 12 | import org.apache.storm.StormSubmitter; 13 | import org.apache.storm.topology.TopologyBuilder; 14 | import org.apache.storm.topology.base.BaseWindowedBolt; 15 | import org.apache.storm.utils.Utils; 16 | 17 | import java.util.Collections; 18 | import java.util.concurrent.TimeUnit; 19 | 20 | /** 21 | * Storm topology that receives log data from NiFi and counts the number of logs per level over a window, 22 | * sending a new dictionary file back to NiFi based on a rate threshold. 23 | * 24 | * @author bbende. 25 | */ 26 | public class LogLevelCountTopology { 27 | 28 | public static final String DEFAULT_PROPERTIES_FILE = "log-level-count.properties"; 29 | 30 | public static void main( String[] args ) throws Exception { 31 | String propertiesFile = DEFAULT_PROPERTIES_FILE; 32 | if (args != null && args.length == 1 && args[0] != null) { 33 | propertiesFile = args[0]; 34 | } 35 | 36 | LogLevelCountProperties props = new LogLevelCountProperties(propertiesFile); 37 | 38 | int windowMillis = props.getStormWindowMillis(); 39 | double rateThreshold = props.getStormRateThreshold(); 40 | 41 | // Build the spout for pulling data from NiFi and pull out the log level into a tuple field 42 | NiFiSpout niFiSpout = new NiFiSpout(getSourceConfig(props), Collections.singletonList(props.getLogLevelAttribute())); 43 | 44 | // Build the bolt for counting log levels over a tumbling window 45 | BaseWindowedBolt logLevelWindowBolt = new LogLevelWindowBolt(props.getLogLevelAttribute()) 46 | .withTumblingWindow(new BaseWindowedBolt.Duration(windowMillis, TimeUnit.MILLISECONDS)); 47 | 48 | // Build the bolt for pushing results back to NiFi 49 | NiFiDataPacketBuilder dictionaryBuilder = new DictionaryBuilder(windowMillis, rateThreshold); 50 | NiFiBolt niFiBolt = new NiFiBolt(getSinkConfig(props), dictionaryBuilder, 10).withBatchSize(1); 51 | 52 | // Build the topology of NiFiSpout -> LogLevelWindowBolt -> NiFiBolt 53 | TopologyBuilder builder = new TopologyBuilder(); 54 | builder.setSpout("nifiInput", niFiSpout); 55 | builder.setBolt("logLevels", logLevelWindowBolt).shuffleGrouping("nifiInput"); 56 | builder.setBolt("nifiOutput", niFiBolt).shuffleGrouping("logLevels"); 57 | 58 | // Submit the topology 59 | Config conf = new Config(); 60 | conf.setDebug(true); 61 | 62 | // Need to set the message timeout to twice the window size in seconds 63 | conf.setMessageTimeoutSecs((props.getStormWindowMillis()/1000) * 2); 64 | 65 | if (args != null && args.length > 0) { 66 | conf.setNumWorkers(3); 67 | StormSubmitter.submitTopologyWithProgressBar(args[0], conf, builder.createTopology()); 68 | } 69 | else { 70 | LocalCluster cluster = new LocalCluster(); 71 
| cluster.submitTopology("log-levels", conf, builder.createTopology()); 72 | Utils.sleep(130000); 73 | cluster.killTopology("log-levels"); 74 | cluster.shutdown(); 75 | } 76 | } 77 | 78 | private static SiteToSiteClientConfig getSourceConfig(LogLevelCountProperties props) { 79 | return new SiteToSiteClient.Builder() 80 | .url(props.getNifiUrl()) 81 | .portName(props.getNifiInputPort()) 82 | .requestBatchCount(props.getNifiRequestBatch()) 83 | .buildConfig(); 84 | } 85 | 86 | private static SiteToSiteClientConfig getSinkConfig(LogLevelCountProperties props) { 87 | return new SiteToSiteClient.Builder() 88 | .url(props.getNifiUrl()) 89 | .portName(props.getNifiOutputPort()) 90 | .buildConfig(); 91 | } 92 | 93 | } 94 | -------------------------------------------------------------------------------- /nifi-flink-examples/src/main/java/nifi/flink/examples/logs/WindowLogLevelCountProps.java: -------------------------------------------------------------------------------- 1 | package nifi.flink.examples.logs; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Properties; 6 | 7 | /** 8 | * Properties for WindowLogLevelAnalytic. 9 | */ 10 | public class WindowLogLevelCountProps { 11 | 12 | static final String NIFI_URL = "nifi.url"; 13 | static final String NIFI_INPUT_PORT = "nifi.input.port"; 14 | static final String NIFI_REQUEST_BATCH = "nifi.input.request.batch"; 15 | static final String NIFI_OUTPUT_PORT = "nifi.output.port"; 16 | 17 | static final String FLINK_WINDOW_MILLIS = "flink.window.milliseconds"; 18 | static final String FLINK_SLIDE_MILLIS = "flink.slide.milliseconds"; 19 | static final String FLINK_RATE_THRESHOLD = "flink.rate.threshold"; 20 | 21 | static final String FLINK_OUTPUT_PATH = "flink.output.path"; 22 | static final String FLINK_OUTPUT_FILENAME = "flink.output.filename"; 23 | static final String LOG_LEVEL_ATTRIBUTE = "log.level.attribute"; 24 | 25 | private final String nifiUrl; 26 | private final String nifiInputPort; 27 | private final String nifiOutputPort; 28 | private final int nifiRequestBatch; 29 | private final int flinkWindowMillis; 30 | private final int flinkSlideMillis; 31 | private final double flinkRateThreshold; 32 | private final String flinkOutputPath; 33 | private final String flinkOutputFileName; 34 | private final String logLevelAttribute; 35 | 36 | public WindowLogLevelCountProps(final String propertiesFile) throws IOException { 37 | final InputStream in = this.getClass().getClassLoader().getResourceAsStream(propertiesFile); 38 | 39 | final Properties properties = new Properties(); 40 | properties.load(in); 41 | 42 | nifiUrl = loadProperty(properties, NIFI_URL); 43 | nifiInputPort = loadProperty(properties, NIFI_INPUT_PORT); 44 | nifiOutputPort = loadProperty(properties, NIFI_OUTPUT_PORT); 45 | 46 | final String tempNiFiRequestBatch = loadProperty(properties, NIFI_REQUEST_BATCH); 47 | nifiRequestBatch = Integer.parseInt(tempNiFiRequestBatch); 48 | 49 | final String tempflinkWindowSize = loadProperty(properties, FLINK_WINDOW_MILLIS); 50 | flinkWindowMillis = Integer.parseInt(tempflinkWindowSize); 51 | 52 | final String tempFlinkSlideSize = loadProperty(properties, FLINK_SLIDE_MILLIS); 53 | flinkSlideMillis = Integer.parseInt(tempFlinkSlideSize); 54 | 55 | final String tempFlinkRateThreshold = loadProperty(properties, FLINK_RATE_THRESHOLD); 56 | flinkRateThreshold = Double.parseDouble(tempFlinkRateThreshold); 57 | 58 | flinkOutputPath = loadProperty(properties, FLINK_OUTPUT_PATH); 59 | flinkOutputFileName = 
loadProperty(properties, FLINK_OUTPUT_FILENAME); 60 | 61 | logLevelAttribute = loadProperty(properties, LOG_LEVEL_ATTRIBUTE); 62 | } 63 | 64 | private String loadProperty(final Properties properties, final String name) { 65 | final String value = properties.getProperty(name); 66 | if (value == null || value.trim().isEmpty()) { 67 | throw new IllegalStateException(name + " is a required property"); 68 | } 69 | return value.trim(); 70 | } 71 | 72 | public String getNifiUrl() { 73 | return nifiUrl; 74 | } 75 | 76 | public String getNifiInputPort() { 77 | return nifiInputPort; 78 | } 79 | 80 | public String getNifiOutputPort() { 81 | return nifiOutputPort; 82 | } 83 | 84 | public int getNifiRequestBatch() { 85 | return nifiRequestBatch; 86 | } 87 | 88 | public int getFlinkWindowMillis() { 89 | return flinkWindowMillis; 90 | } 91 | 92 | public int getFlinkSlideMillis() { 93 | return flinkSlideMillis; 94 | } 95 | 96 | public double getFlinkRateThreshold() { 97 | return flinkRateThreshold; 98 | } 99 | 100 | public String getFlinkOutputPath() { 101 | return flinkOutputPath; 102 | } 103 | 104 | public String getFlinkOutputFileName() { 105 | return flinkOutputFileName; 106 | } 107 | 108 | public String getLogLevelAttribute() { 109 | return logLevelAttribute; 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /nifi-apex-examples/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | nifi-streaming-examples 5 | org.apache.nifi 6 | 0.0.1-SNAPSHOT 7 | 8 | 4.0.0 9 | 10 | nifi-apex-examples 11 | jar 12 | 13 | 14 | 3.4.0 15 | 3.4.0 16 | lib/*.jar 17 | 18 | 19 | 20 | 21 | 22 | org.apache.maven.plugins 23 | maven-eclipse-plugin 24 | 2.9 25 | 26 | true 27 | 28 | 29 | 30 | maven-compiler-plugin 31 | 3.3 32 | 33 | UTF-8 34 | 1.7 35 | 1.7 36 | true 37 | false 38 | true 39 | true 40 | 41 | 42 | 43 | maven-dependency-plugin 44 | 2.8 45 | 46 | 47 | copy-dependencies 48 | prepare-package 49 | 50 | copy-dependencies 51 | 52 | 53 | target/deps 54 | runtime 55 | 56 | 57 | 58 | 59 | 60 | 61 | maven-assembly-plugin 62 | 63 | 64 | app-package-assembly 65 | package 66 | 67 | single 68 | 69 | 70 | ${project.artifactId}-${project.version}-apexapp 71 | false 72 | 73 | src/assemble/appPackage.xml 74 | 75 | 76 | 0755 77 | 78 | 79 | 80 | ${apex.apppackage.classpath} 81 | ${apex.version} 82 | ${project.groupId} 83 | ${project.artifactId} 84 | ${project.version} 85 | ${project.name} 86 | ${project.description} 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | maven-antrun-plugin 96 | 1.7 97 | 98 | 99 | package 100 | 101 | 102 | 104 | 105 | 106 | 107 | run 108 | 109 | 110 | 111 | 112 | createJavadocDirectory 113 | generate-resources 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | run 122 | 123 | 124 | 125 | 126 | 127 | 128 | org.codehaus.mojo 129 | build-helper-maven-plugin 130 | 1.9.1 131 | 132 | 133 | attach-artifacts 134 | package 135 | 136 | attach-artifact 137 | 138 | 139 | 140 | 141 | target/${project.artifactId}-${project.version}.apa 142 | apa 143 | 144 | 145 | false 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | org.apache.maven.plugins 154 | maven-javadoc-plugin 155 | 156 | 157 | 158 | xml-doclet 159 | generate-resources 160 | 161 | javadoc 162 | 163 | 164 | com.github.markusbernhardt.xmldoclet.XmlDoclet 165 | -d ${project.build.directory}/generated-resources/xml-javadoc -filename ${project.artifactId}-${project.version}-javadoc.xml 166 | false 167 | 168 | com.github.markusbernhardt 169 | xml-doclet 170 | 1.0.4 171 | 172 | 
173 | 174 | 175 | 176 | 177 | 178 | org.codehaus.mojo 179 | xml-maven-plugin 180 | 1.0 181 | 182 | 183 | transform-xmljavadoc 184 | generate-resources 185 | 186 | transform 187 | 188 | 189 | 190 | 191 | 192 | 193 | ${project.build.directory}/generated-resources/xml-javadoc 194 | 195 | ${project.artifactId}-${project.version}-javadoc.xml 196 | 197 | XmlJavadocCommentsExtractor.xsl 198 | ${project.build.directory}/generated-resources/xml-javadoc 199 | 200 | 201 | 202 | 203 | 204 | 205 | maven-resources-plugin 206 | 2.6 207 | 208 | 209 | copy-resources 210 | process-resources 211 | 212 | copy-resources 213 | 214 | 215 | ${basedir}/target/classes 216 | 217 | 218 | ${project.build.directory}/generated-resources/xml-javadoc 219 | 220 | ${project.artifactId}-${project.version}-javadoc.xml 221 | 222 | true 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | org.apache.nifi 237 | nifi-site-to-site-client 238 | 239 | 240 | org.apache.apex 241 | malhar-library 242 | ${malhar.version} 243 | 244 | 245 | org.apache.apex 246 | malhar-contrib 247 | ${malhar.version} 248 | 249 | 250 | org.apache.apex 251 | apex-common 252 | ${apex.version} 253 | provided 254 | 255 | 256 | junit 257 | junit 258 | 4.10 259 | test 260 | 261 | 262 | org.apache.apex 263 | apex-engine 264 | ${apex.version} 265 | test 266 | 267 | 268 | 269 | 270 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /templates/nifi-log-example-core.xml: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /templates/nifi-log-example-edge.xml: -------------------------------------------------------------------------------- 1 | 2 | --------------------------------------------------------------------------------
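Illustrative sketch (not a file in this repository): the Storm topology and the WindowLogLevelCountProps class above show the two halves of the Site-to-Site wiring -- building a SiteToSiteClientConfig from properties and handing it to a streaming connector. A minimal Flink driver using the same pieces might look like the following, assuming the flink-connector-nifi module (NiFiSource/NiFiDataPacket) is on the classpath; the class name WindowLogLevelCountSketch and the resource name "window-log-level.properties" are assumptions for illustration only.

package nifi.flink.examples.logs;

// Illustrative sketch only -- not part of this repository.
// Assumes flink-connector-nifi is available (NiFiSource, NiFiDataPacket).
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.nifi.NiFiDataPacket;
import org.apache.flink.streaming.connectors.nifi.NiFiSource;
import org.apache.nifi.remote.client.SiteToSiteClient;
import org.apache.nifi.remote.client.SiteToSiteClientConfig;

public class WindowLogLevelCountSketch {

    public static void main(String[] args) throws Exception {
        // Load the properties file bundled with the Flink example (assumed name).
        final WindowLogLevelCountProps props =
                new WindowLogLevelCountProps("window-log-level.properties");

        // Build a Site-to-Site config for the NiFi input port, mirroring
        // getSourceConfig() in the Storm topology shown above.
        final SiteToSiteClientConfig sourceConfig = new SiteToSiteClient.Builder()
                .url(props.getNifiUrl())
                .portName(props.getNifiInputPort())
                .requestBatchCount(props.getNifiRequestBatch())
                .buildConfig();

        final StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();

        // Pull FlowFiles from NiFi as NiFiDataPacket instances.
        final DataStream<NiFiDataPacket> packets =
                env.addSource(new NiFiSource(sourceConfig));

        // A real job would extract the log.level attribute, apply the sliding
        // window, and send results back through a sink; here we just print.
        packets.print();

        env.execute("WindowLogLevelCountSketch");
    }
}

The repository's WindowLogLevelCount presumably builds on this same pattern, adding the windowing functions from the functions package and writing results back to the NiFi output port.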