├── results
│   └── .keep
├── project
│   ├── build.properties
│   ├── plugins.sbt
│   └── Dependencies.scala
├── version.sbt
├── misc
│   └── wayeb_glyph.png
├── patterns
│   ├── demo
│   │   ├── a_seq_b.sre
│   │   ├── declarations.sre
│   │   └── a_seq_b_or_c.sre
│   ├── validation
│   │   ├── pattern2.sre
│   │   ├── pattern3.sre
│   │   └── pattern4.sre
│   ├── homes
│   │   └── reg1.sre
│   ├── maritime
│   │   └── port
│   │       ├── pattern.sre
│   │       ├── patternRel.sre
│   │       └── declarationsDistance1.sre
│   ├── stock
│   │   └── reg1.sre
│   └── taxi
│       └── reg1.sre
├── docs
│   ├── papers
│   │   ├── Wayeb_SREMO.pdf
│   │   ├── Wayeb-DEBS17.pdf
│   │   ├── Wayeb-LPAR18.pdf
│   │   ├── Wayeb_VLDBJ22.pdf
│   │   ├── alevizos_thesis_final.pdf
│   │   └── Wayeb_VLDBJ22_extended.pdf
│   └── building.md
├── cef
│   └── src
│       ├── main
│       │   ├── scala
│       │   │   ├── utils
│       │   │   │   ├── Shutdownable.scala
│       │   │   │   ├── SerializationUtils.scala
│       │   │   │   ├── StringUtils.scala
│       │   │   │   ├── MathUtils.scala
│       │   │   │   └── MiscUtils.scala
│       │   │   ├── workflow
│       │   │   │   ├── task
│       │   │   │   │   ├── Task.scala
│       │   │   │   │   ├── fsmTask
│       │   │   │   │   │   ├── SNFATask.scala
│       │   │   │   │   │   ├── NSRATask.scala
│       │   │   │   │   │   ├── DSRATask.scala
│       │   │   │   │   │   └── DFATask.scala
│       │   │   │   │   └── predictorTask
│       │   │   │   │       ├── PredictorRandomTask.scala
│       │   │   │   │       └── PredictorNextTask.scala
│       │   │   │   ├── provider
│       │   │   │   │   ├── source
│       │   │   │   │   │   ├── rt
│       │   │   │   │   │   │   ├── RTSource.scala
│       │   │   │   │   │   │   ├── RTSourceDirect.scala
│       │   │   │   │   │   │   └── RTSourceEstimator.scala
│       │   │   │   │   │   ├── wt
│       │   │   │   │   │   │   ├── WtSource.scala
│       │   │   │   │   │   │   ├── WtSourceDirect.scala
│       │   │   │   │   │   │   ├── WtSourceRT.scala
│       │   │   │   │   │   │   ├── WtSourceMatrix.scala
│       │   │   │   │   │   │   ├── WtSourceSPST.scala
│       │   │   │   │   │   │   └── WtSourceSPSTm.scala
│       │   │   │   │   │   ├── dfa
│       │   │   │   │   │   │   ├── DFASource.scala
│       │   │   │   │   │   │   ├── DFASourceDirect.scala
│       │   │   │   │   │   │   ├── DFASourceSerialized.scala
│       │   │   │   │   │   │   ├── DFASourceFromSDFA.scala
│       │   │   │   │   │   │   ├── DFASourceFromXML.scala
│       │   │   │   │   │   │   └── DFASourceRegExp.scala
│       │   │   │   │   │   ├── hmm
│       │   │   │   │   │   │   ├── HMMSource.scala
│       │   │   │   │   │   │   ├── HMMSourceDirect.scala
│       │   │   │   │   │   │   └── HMMSourceEstimator.scala
│       │   │   │   │   │   ├── psa
│       │   │   │   │   │   │   ├── PSASource.scala
│       │   │   │   │   │   │   ├── PSASourceDirect.scala
│       │   │   │   │   │   │   ├── PSASourceSerialized.scala
│       │   │   │   │   │   │   └── PSASourceLearner.scala
│       │   │   │   │   │   ├── pst
│       │   │   │   │   │   │   ├── PSTSource.scala
│       │   │   │   │   │   │   ├── PSTSourceDirect.scala
│       │   │   │   │   │   │   ├── PSTSourceLearnerFromSDFA.scala
│       │   │   │   │   │   │   ├── PSTSourceLearnerFromDSRA.scala
│       │   │   │   │   │   │   └── PSTSourceCST.scala
│       │   │   │   │   │   ├── dsra
│       │   │   │   │   │   │   ├── DSRASource.scala
│       │   │   │   │   │   │   ├── DSRASourceSerialized.scala
│       │   │   │   │   │   │   ├── DSRASourceDirectI.scala
│       │   │   │   │   │   │   ├── DSRASourceFromSREM.scala
│       │   │   │   │   │   │   ├── DSRASourceRegExp.scala
│       │   │   │   │   │   │   └── DSRASourceDirect.scala
│       │   │   │   │   │   ├── nsra
│       │   │   │   │   │   │   ├── NSRASource.scala
│       │   │   │   │   │   │   ├── NSRASourceSerialized.scala
│       │   │   │   │   │   │   ├── NSRASourceFromSREM.scala
│       │   │   │   │   │   │   └── NSRASourceRegExp.scala
│       │   │   │   │   │   ├── sdfa
│       │   │   │   │   │   │   ├── SDFASource.scala
│       │   │   │   │   │   │   ├── SDFASourceSerialized.scala
│       │   │   │   │   │   │   ├── SDFASourceDirectI.scala
│       │   │   │   │   │   │   ├── SDFASourceDFA.scala
│       │   │   │   │   │   │   ├── SDFASourceDirect.scala
│       │   │   │   │   │   │   ├── SDFASourceFromSRE.scala
│       │   │   │   │   │   │   ├── SDFASourceFormula.scala
│       │   │   │   │   │   │   └── SDFASourceRegExp.scala
│       │   │   │   │   │   ├── snfa
│       │   │   │   │   │   │   ├── SNFASource.scala
│       │   │   │   │   │   │   ├── SNFASourceFromSRE.scala
│       │   │   │   │   │   │   ├── SNFASourceSerialized.scala
│       │   │   │   │   │   │   └── SNFASourceRegExp.scala
│       │   │   │   │   │   ├── spsa
│       │   │   │   │   │   │   ├── SPSASource.scala
│       │   │   │   │   │   │   ├── SPSASourceSerialized.scala
│       │   │   │   │   │   │   ├── SPSASourceDirectI.scala
│       │   │   │   │   │   │   ├── SPSASourceDirect.scala
│       │   │   │   │   │   │   ├── SPSASourcePSASerialized.scala
│       │   │   │   │   │   │   └── SPSASourceFromSRE.scala
│       │   │   │   │   │   ├── spst
│       │   │   │   │   │   │   ├── SPSTSource.scala
│       │   │   │   │   │   │   ├── SPSTSourceSerialized.scala
│       │   │   │   │   │   │   ├── SPSTSourceDirectI.scala
│       │   │   │   │   │   │   ├── SPSTSourceFromSRE.scala
│       │   │   │   │   │   │   └── SPSTSourceFromSDFA.scala
│       │   │   │   │   │   ├── matrix
│       │   │   │   │   │   │   ├── MatrixSource.scala
│       │   │   │   │   │   │   ├── MCSourceSerialized.scala
│       │   │   │   │   │   │   ├── MCSourceDirect.scala
│       │   │   │   │   │   │   ├── MCSourceSPSA.scala
│       │   │   │   │   │   │   ├── MCSourceMLE.scala
│       │   │   │   │   │   │   └── MCSourceProbs.scala
│       │   │   │   │   │   ├── order
│       │   │   │   │   │   │   ├── OrderSource.scala
│       │   │   │   │   │   │   ├── OrderSourceDirect.scala
│       │   │   │   │   │   │   └── OrderSourceCrossVal.scala
│       │   │   │   │   │   ├── spstm
│       │   │   │   │   │   │   ├── SPSTmSource.scala
│       │   │   │   │   │   │   ├── SPSTmSourceSerialized.scala
│       │   │   │   │   │   │   ├── SPSTmSourceDirectI.scala
│       │   │   │   │   │   │   ├── SPSTmSourceFromSREM.scala
│       │   │   │   │   │   │   └── SPSTmSourceFromDSRA.scala
│       │   │   │   │   │   └── forecaster
│       │   │   │   │   │       ├── ForecasterSource.scala
│       │   │   │   │   │       ├── ForecasterSourceSerialized.scala
│       │   │   │   │   │       ├── ForecasterSourceDirect.scala
│       │   │   │   │   │       ├── ForecasterSourceRandom.scala
│       │   │   │   │   │       ├── ForecasterNextSourceBuild.scala
│       │   │   │   │   │       ├── ForecasterHMMSourceBuild.scala
│       │   │   │   │   │       └── ForecasterSourceBuild.scala
│       │   │   │   │   └── AbstractProvider.scala
│       │   │   │   └── condition
│       │   │   │       ├── FileExistsCondition.scala
│       │   │   │       └── Condition.scala
│       │   │   ├── fsm
│       │   │   │   ├── symbolic
│       │   │   │   │   ├── Constants.scala
│       │   │   │   │   ├── sre
│       │   │   │   │   │   ├── BooleanOperator.scala
│       │   │   │   │   │   ├── SelectionStrategy.scala
│       │   │   │   │   │   ├── RegularOperator.scala
│       │   │   │   │   │   └── Declaration.scala
│       │   │   │   │   ├── sfa
│       │   │   │   │   │   ├── SFAState.scala
│       │   │   │   │   │   ├── sdfa
│       │   │   │   │   │   │   └── SDFAState.scala
│       │   │   │   │   │   ├── snfa
│       │   │   │   │   │   │   ├── SNFAState.scala
│       │   │   │   │   │   │   └── SNFAStateMutant.scala
│       │   │   │   │   │   ├── Constants.scala
│       │   │   │   │   │   ├── SFAGuard.scala
│       │   │   │   │   │   ├── SFATransition.scala
│       │   │   │   │   │   └── SFA.scala
│       │   │   │   │   ├── sra
│       │   │   │   │   │   ├── SRAState.scala
│       │   │   │   │   │   ├── nsra
│       │   │   │   │   │   │   ├── NSRAState.scala
│       │   │   │   │   │   │   ├── NSRA.scala
│       │   │   │   │   │   │   └── Tracker.scala
│       │   │   │   │   │   ├── dsra
│       │   │   │   │   │   │   ├── DSRAState.scala
│       │   │   │   │   │   │   └── DSRA.scala
│       │   │   │   │   │   └── SRAGuard.scala
│       │   │   │   │   ├── AutomatonState.scala
│       │   │   │   │   ├── Guard.scala
│       │   │   │   │   ├── TransitionOutput.scala
│       │   │   │   │   ├── logic
│       │   │   │   │   │   ├── EpsilonSentence.scala
│       │   │   │   │   │   ├── IsEventTypeSentence.scala
│       │   │   │   │   │   ├── predicates
│       │   │   │   │   │   │   ├── TruePredicate.scala
│       │   │   │   │   │   │   ├── EQStr.scala
│       │   │   │   │   │   │   ├── IsEventTypePredicate.scala
│       │   │   │   │   │   │   ├── EQAttr.scala
│       │   │   │   │   │   │   ├── EQAttrStr.scala
│       │   │   │   │   │   │   ├── GTAttr.scala
│       │   │   │   │   │   │   ├── LTAttr.scala
│       │   │   │   │   │   │   ├── EQ.scala
│       │   │   │   │   │   │   ├── GT.scala
│       │   │   │   │   │   │   ├── LT.scala
│       │   │   │   │   │   │   ├── GTE.scala
│       │   │   │   │   │   │   ├── LTE.scala
│       │   │   │   │   │   │   ├── OutsideCirclePredicate.scala
│       │   │   │   │   │   │   ├── WithinCirclePredicate.scala
│       │   │   │   │   │   │   ├── BT.scala
│       │   │   │   │   │   │   ├── EpsilonPredicate.scala
│       │   │   │   │   │   │   └── DistanceBetweenPredicate.scala
│       │   │   │   │   │   ├── TrueSentence.scala
│       │   │   │   │   │   ├── Predicate.scala
│       │   │   │   │   │   ├── TruthTable.scala
│       │   │   │   │   │   ├── BooleanPermutator.scala
│       │   │   │   │   │   └── Assignment.scala
│       │   │   │   │   └── Transition.scala
│       │   │   │   ├── classical
│       │   │   │   │   ├── pattern
│       │   │   │   │   │   ├── regexp
│       │   │   │   │   │   │   ├── NodeType.scala
│       │   │   │   │   │   │   ├── OperatorType.scala
│       │   │   │   │   │   │   ├── RegExpTree.scala
│       │   │   │   │   │   │   ├── SymbolNode.scala
│       │   │   │   │   │   │   ├── OperatorNode.scala
│       │   │   │   │   │   │   └── archived
│       │   │   │   │   │   │       └── Node.scala
│       │   │   │   │   │   └── archived
│       │   │   │   │   │       └── Reader.scala
│       │   │   │   │   └── FATransition.scala
│       │   │   │   ├── WindowType.scala
│       │   │   │   ├── CountPolicy.scala
│       │   │   │   ├── runtime
│       │   │   │   │   ├── RunPrototype.scala
│       │   │   │   │   ├── RunListener.scala
│       │   │   │   │   └── RunRegistry.scala
│       │   │   │   └── FSMModel.scala
│       │   │   ├── stream
│       │   │   │   ├── array
│       │   │   │   │   ├── EventStreamI.scala
│       │   │   │   │   ├── PSAStream.scala
│       │   │   │   │   ├── archived
│       │   │   │   │   │   ├── CSVStream.scala
│       │   │   │   │   │   └── Generator.scala
│       │   │   │   │   ├── XMLParser.scala
│       │   │   │   │   ├── ListStream.scala
│       │   │   │   │   └── ProbMapStream.scala
│       │   │   │   ├── source
│       │   │   │   │   ├── EmitMode.scala
│       │   │   │   │   ├── StreamListener.scala
│       │   │   │   │   ├── EndOfStreamEvent.scala
│       │   │   │   │   ├── JsonLineParser.scala
│       │   │   │   │   ├── GenericCSVLineParser.scala
│       │   │   │   │   ├── LineParser.scala
│       │   │   │   │   ├── ArrayStreamSource.scala
│       │   │   │   │   └── JsonFileStreamSource.scala
│       │   │   │   ├── ResetEvent.scala
│       │   │   │   └── domain
│       │   │   │       ├── maritime
│       │   │   │       │   └── MaritimeLineParser.scala
│       │   │   │       └── homes
│       │   │   │           └── HomesLineParser.scala
│       │   │   ├── model
│       │   │   │   ├── forecaster
│       │   │   │   │   ├── ForecasterType.scala
│       │   │   │   │   ├── runtime
│       │   │   │   │   │   ├── ForecasterPrototype.scala
│       │   │   │   │   │   └── ForecasterRegistry.scala
│       │   │   │   │   ├── ForecasterInterface.scala
│       │   │   │   │   ├── next
│       │   │   │   │   │   ├── NextForecasterBuilder.scala
│       │   │   │   │   │   └── NextForecaster.scala
│       │   │   │   │   ├── random
│       │   │   │   │   │   └── RandomForecaster.scala
│       │   │   │   │   └── wt
│       │   │   │   │       └── WtForecaster.scala
│       │   │   │   ├── ProbModel.scala
│       │   │   │   ├── vmm
│       │   │   │   │   ├── mapper
│       │   │   │   │   │   ├── SymbolMapper.scala
│       │   │   │   │   │   └── SymbolExtractorFromDSRA.scala
│       │   │   │   │   ├── pst
│       │   │   │   │   │   ├── psa
│       │   │   │   │   │   │   ├── PSATransition.scala
│       │   │   │   │   │   │   └── PSAMatrix.scala
│       │   │   │   │   │   ├── spsa
│       │   │   │   │   │   │   └── SPSATransition.scala
│       │   │   │   │   │   └── BufferBank.scala
│       │   │   │   │   └── Symbol.scala
│       │   │   │   ├── waitingTime
│       │   │   │   │   └── ForecastMethod.scala
│       │   │   │   └── markov
│       │   │   │       └── MarkovChainFactory.scala
│       │   │   ├── profiler
│       │   │   │   ├── StatsEstimator.scala
│       │   │   │   ├── ForecastCollector.scala
│       │   │   │   └── ProfilerInterface.scala
│       │   │   ├── estimator
│       │   │   │   ├── RunEstimator.scala
│       │   │   │   ├── HMMEstimator
│       │   │   │   │   ├── IsoHMM.scala
│       │   │   │   │   └── FSMStateRun.scala
│       │   │   │   └── RemainingTimeEstimator
│       │   │   │       └── MeanRun.scala
│       │   │   └── db
│       │   │       └── DetectionsTable.scala
│       │   └── resources
│       │       └── logback.xml
│       └── test
│           └── scala
│               └── Specs
│                   ├── vmm
│                   │   ├── CompleteProperSuffixSet.scala
│                   │   └── PSAGenerator.scala
│                   ├── engine
│                   │   └── EngineSpec.scala
│                   ├── selection
│                   │   └── TransformToStrict.scala
│                   ├── classical
│                   │   ├── nfa
│                   │   │   └── NFA2DFA.scala
│                   │   └── dfa
│                   │       └── DisSpec.scala
│                   ├── symbolic
│                   │   ├── snfa
│                   │   │   ├── SNFAeqNEGSNFA.scala
│                   │   │   └── NFAEqSNFA.scala
│                   │   └── sdfa
│                   │       └── SDFADistances.scala
│                   └── misc
│                       └── PermutationsSpec.scala
├── kafkaConfigs
│   └── kafkaEarliest.properties
├── LICENSE.md
├── data
│   └── demo
│       └── data.csv
└── .gitignore
/results/.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.2.8
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 | version in ThisBuild := "0.6.0-SNAPSHOT"
--------------------------------------------------------------------------------
/misc/wayeb_glyph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ElAlev/Wayeb/HEAD/misc/wayeb_glyph.png
--------------------------------------------------------------------------------
/patterns/demo/a_seq_b.sre:
--------------------------------------------------------------------------------
1 | ;(IsEventTypePredicate(A),IsEventTypePredicate(B)){order:2}{window:2}
--------------------------------------------------------------------------------
/docs/papers/Wayeb_SREMO.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ElAlev/Wayeb/HEAD/docs/papers/Wayeb_SREMO.pdf
--------------------------------------------------------------------------------
/patterns/validation/pattern2.sre:
--------------------------------------------------------------------------------
1 | ;(IsEventTypePredicate(A)["y"],*(EQAttr(attr,"y"))){order:0}{window:3}
--------------------------------------------------------------------------------
/docs/papers/Wayeb-DEBS17.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ElAlev/Wayeb/HEAD/docs/papers/Wayeb-DEBS17.pdf
--------------------------------------------------------------------------------
/docs/papers/Wayeb-LPAR18.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ElAlev/Wayeb/HEAD/docs/papers/Wayeb-LPAR18.pdf
--------------------------------------------------------------------------------
/docs/papers/Wayeb_VLDBJ22.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ElAlev/Wayeb/HEAD/docs/papers/Wayeb_VLDBJ22.pdf
--------------------------------------------------------------------------------
/patterns/demo/declarations.sre:
--------------------------------------------------------------------------------
1 | ~(IsEventTypePredicate(A),IsEventTypePredicate(B),IsEventTypePredicate(C))
--------------------------------------------------------------------------------
/docs/papers/alevizos_thesis_final.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ElAlev/Wayeb/HEAD/docs/papers/alevizos_thesis_final.pdf
--------------------------------------------------------------------------------
/cef/src/main/scala/utils/Shutdownable.scala:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | trait Shutdownable {
4 | def shutdown(): Unit
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/task/Task.scala:
--------------------------------------------------------------------------------
1 | package workflow.task
2 |
3 | trait Task {
4 | def execute(): Object
5 | }
6 |
--------------------------------------------------------------------------------
/docs/papers/Wayeb_VLDBJ22_extended.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ElAlev/Wayeb/HEAD/docs/papers/Wayeb_VLDBJ22_extended.pdf
--------------------------------------------------------------------------------
/patterns/validation/pattern3.sre:
--------------------------------------------------------------------------------
1 | ;(IsEventTypePredicate(A)["x"],^(IsEventTypePredicate(B),EQAttr(xattr,"x"))){partitionBy:someId}
--------------------------------------------------------------------------------
/patterns/validation/pattern4.sre:
--------------------------------------------------------------------------------
1 | ;(^(IsEventTypePredicate(B),EQAttr(xattr,"x")),IsEventTypePredicate(A)["x"]){partitionBy:someId}
--------------------------------------------------------------------------------
/patterns/demo/a_seq_b_or_c.sre:
--------------------------------------------------------------------------------
1 | ;(IsEventTypePredicate(A),+(IsEventTypePredicate(B),IsEventTypePredicate(C))){order:2}{window:0}
2 |
--------------------------------------------------------------------------------
/patterns/homes/reg1.sre:
--------------------------------------------------------------------------------
1 | #(;(EQ(householdId,0.0)["x"],EQ(householdId,2.0),^(EQ(householdId,4.0),GTAttr(value,"x")))){window:10}{windowType:time}
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/Constants.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic
2 |
3 | object Constants {
4 | val deadStateIdConstant: Int = 0
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/rt/RTSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.rt
2 |
3 | abstract class RTSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/wt/WtSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.wt
2 |
3 | abstract class WtSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/array/EventStreamI.scala:
--------------------------------------------------------------------------------
1 | package stream.array
2 |
3 | trait EventStreamI {
4 | def generateStream(): EventStream
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dfa/DFASource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dfa
2 |
3 | abstract class DFASource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/hmm/HMMSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.hmm
2 |
3 | abstract class HMMSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/psa/PSASource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.psa
2 |
3 | abstract class PSASource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/pst/PSTSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.pst
2 |
3 | abstract class PSTSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dsra/DSRASource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dsra
2 |
3 | abstract class DSRASource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/nsra/NSRASource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.nsra
2 |
3 | abstract class NSRASource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | abstract class SDFASource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/snfa/SNFASource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.snfa
2 |
3 | abstract class SNFASource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spsa/SPSASource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spsa
2 |
3 | abstract class SPSASource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spst/SPSTSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spst
2 |
3 | abstract class SPSTSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/matrix/MatrixSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.matrix
2 |
3 | abstract class MatrixSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/order/OrderSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.order
2 |
3 | abstract class OrderSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spstm/SPSTmSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spstm
2 |
3 | abstract class SPSTmSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/forecaster/ForecasterSource.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.forecaster
2 |
3 | abstract class ForecasterSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/kafkaConfigs/kafkaEarliest.properties:
--------------------------------------------------------------------------------
1 | inputTopic=wayebTopic
2 | bootstrap.servers=localhost:9092
3 | group.id=group1
4 | auto.offset.reset=earliest
5 | enable.auto.commit=false
6 |
--------------------------------------------------------------------------------
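
With auto.offset.reset=earliest and auto-commit disabled, a consumer in group group1 that has no committed offsets replays wayebTopic from the beginning. A minimal consumer sketch built on these properties follows; it is not part of the repository, and the deserializer settings and the demo object are assumptions, since the properties file does not set them:

import java.io.FileInputStream
import java.time.Duration
import java.util.{Collections, Properties}
import org.apache.kafka.clients.consumer.KafkaConsumer

object KafkaEarliestDemo {
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    val in = new FileInputStream("kafkaConfigs/kafkaEarliest.properties")
    props.load(in)
    in.close()
    // The properties file sets no deserializers; plain string payloads are assumed here.
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    val consumer = new KafkaConsumer[String, String](props)
    consumer.subscribe(Collections.singletonList(props.getProperty("inputTopic")))
    val records = consumer.poll(Duration.ofSeconds(1))
    records.forEach(r => println(s"${r.offset}: ${r.value}"))
    consumer.close()
  }
}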
/cef/src/main/scala/stream/source/EmitMode.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | object EmitMode extends Enumeration {
4 | type EmitMode = Value
5 | val BUFFER, ONLINE = Value
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/order/OrderSourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.order
2 |
3 | class OrderSourceDirect(val order: Int) extends OrderSource {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/source/StreamListener.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | import stream.GenericEvent
4 |
5 | trait StreamListener {
6 | def newEventEmitted(event: GenericEvent): Unit
7 | }
8 |
--------------------------------------------------------------------------------
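
StreamListener is the observer hook through which stream sources push events to consumers. A minimal implementation sketch (not part of the repository):

import stream.GenericEvent
import stream.source.StreamListener

// Counts the events emitted by whatever source it is registered with.
class CountingListener extends StreamListener {
  private var counter: Long = 0
  override def newEventEmitted(event: GenericEvent): Unit = counter += 1
  def count: Long = counter
}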
/cef/src/main/scala/fsm/symbolic/sre/BooleanOperator.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sre
2 |
3 | object BooleanOperator extends Enumeration {
4 | type BooleanOperator = Value
5 | val AND, OR, NOT = Value
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/SFAState.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa
2 |
3 | import fsm.symbolic.AutomatonState
4 |
5 | abstract class SFAState(override val id: Int) extends AutomatonState(id) {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sra/SRAState.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sra
2 |
3 | import fsm.symbolic.AutomatonState
4 |
5 | abstract class SRAState(override val id: Int) extends AutomatonState(id) {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/patterns/maritime/port/pattern.sre:
--------------------------------------------------------------------------------
1 | ;(OutsideCirclePredicate(-4.47530,48.38273,5.0),OutsideCirclePredicate(-4.47530,48.38273,5.0),WithinCirclePredicate(-4.47530,48.38273,5.0)){order:2}{partitionBy:mmsi}{window:1000}
2 |
--------------------------------------------------------------------------------
/patterns/stock/reg1.sre:
--------------------------------------------------------------------------------
1 | #(;(^(IsEventTypePredicate(SELL),EQStr(name,INTC))["x"],^(IsEventTypePredicate(BUY),EQStr(name,RIMM)),^(IsEventTypePredicate(BUY),EQStr(name,QQQ),GTAttr(price,"x")))){window:500}{windowType:time}
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/pattern/regexp/NodeType.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical.pattern.regexp
2 |
3 | object NodeType extends Enumeration {
4 | type NodeType = Value
5 | val OPERATOR, SYMBOL = Value
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sre/SelectionStrategy.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sre
2 |
3 | object SelectionStrategy extends Enumeration {
4 | type SelectionStrategy = Value
5 | val STRICT, ANY, NEXT = Value
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/forecaster/ForecasterType.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster
2 |
3 | object ForecasterType extends Enumeration {
4 | type ForecasterType = Value
5 | val REGRESSION, CLASSIFICATION = Value
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/sdfa/SDFAState.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa.sdfa
2 |
3 | import fsm.symbolic.sfa.SFAState
4 |
5 | case class SDFAState private[sfa] (override val id: Int) extends SFAState(id) {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sra/nsra/NSRAState.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sra.nsra
2 |
3 | import fsm.symbolic.sra.SRAState
4 |
5 | case class NSRAState private[nsra] (override val id: Int) extends SRAState(id) {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sre/RegularOperator.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sre
2 |
3 | object RegularOperator extends Enumeration {
4 | type RegularOperator = Value
5 | val SEQ, CHOICE, ITER, NEG, ANY, NEXT = Value
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/forecaster/runtime/ForecasterPrototype.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster.runtime
2 |
3 | trait ForecasterPrototype {
4 | def cloneForecaster(runId: Int): ForecasterRun
5 | def getInterfaceId: Int
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dfa/DFASourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dfa
2 |
3 | import fsm.classical.fa.dfa.DFA
4 |
5 | class DFASourceDirect(val dfa: List[DFA]) extends DFASource {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/AutomatonState.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 |
5 | abstract class AutomatonState(val id: Int) extends Serializable with LazyLogging {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/snfa/SNFAState.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa.snfa
2 |
3 | import fsm.symbolic.sfa.SFAState
4 |
5 | case class SNFAState private[snfa](override val id: Int) extends SFAState(id = id) {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sra/dsra/DSRAState.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sra.dsra
2 |
3 | import fsm.symbolic.sra.SRAState
4 |
5 | case class DSRAState private[dsra] (override val id: Int) extends SRAState(id) {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/pattern/regexp/OperatorType.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical.pattern.regexp
2 |
3 | object OperatorType extends Enumeration {
4 | type OperatorType = Value
5 | val NONE, CONCAT, UNION, ITER = Value
6 | }
7 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/pattern/regexp/RegExpTree.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical.pattern.regexp
2 |
3 | /**
4 | * A regular expression is represented as a tree.
5 | */
6 | abstract class RegExpTree
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/patterns/maritime/port/patternRel.sre:
--------------------------------------------------------------------------------
1 | ;(OutsideCirclePredicate(-4.47530,48.38273,5.0),OutsideCirclePredicate(-4.47530,48.38273,5.0)["x"],^(WithinCirclePredicate(-4.47530,48.38273,5.0),GTAttr(speed,"x"))){partitionBy:mmsi}{window:1000}{windowType:time}
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/psa/PSASourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.psa
2 |
3 | import model.vmm.pst.psa.ProbSuffixAutomaton
4 |
5 | class PSASourceDirect(val psa: List[ProbSuffixAutomaton]) extends PSASource {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/patterns/taxi/reg1.sre:
--------------------------------------------------------------------------------
1 | #(;(^(EQStr(pickupZone,EastHarlemNorth),EQStr(dropoffZone,Midwood))["x"],^(EQStr(pickupZone,Midwood),EQStr(dropoffZone,Gravesend)),^(EQStr(pickupZone,Gravesend),EQStr(dropoffZone,WestBrighton),GTAttr(totalAmount,"x")))){window:100}{windowType:time}
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/matrix/MCSourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.matrix
2 |
3 | object MCSourceSerialized {
4 | def apply(fn: String): MCSourceSerialized = new MCSourceSerialized(fn)
5 | }
6 | class MCSourceSerialized(val fn: String) extends MatrixSource {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/Constants.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa
2 |
3 | object Constants {
4 | val predicatesClassPrefix = "fsm.symbolic.logic.predicates."
5 | val epsilonPredicate = "EpsilonPredicate"
6 | val truePredicate = "TruePredicate"
7 | val eventTypePredicate = "IsEventTypePredicate"
8 | }
9 |
--------------------------------------------------------------------------------
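
The class-name prefix above suggests that predicates are resolved reflectively from the names appearing in .sre patterns; whether the engine does exactly this is not shown in this dump, but such a lookup would be a one-liner:

import fsm.symbolic.sfa.Constants

object PredicateLookup extends App {
  // Resolve a predicate class by the short name used in pattern files.
  val clazz = Class.forName(Constants.predicatesClassPrefix + "TruePredicate")
  println(clazz.getName) // fsm.symbolic.logic.predicates.TruePredicate
}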
/cef/src/main/scala/workflow/provider/source/dfa/DFASourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dfa
2 |
3 | object DFASourceSerialized {
4 | def apply(fn: String): DFASourceSerialized = new DFASourceSerialized(fn)
5 | }
6 |
7 | class DFASourceSerialized(val fn: String) extends DFASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/psa/PSASourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.psa
2 |
3 | object PSASourceSerialized {
4 | def apply(fn: String): PSASourceSerialized = new PSASourceSerialized(fn)
5 | }
6 |
7 | class PSASourceSerialized(val fn: String) extends PSASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dsra/DSRASourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dsra
2 |
3 | object DSRASourceSerialized {
4 | def apply(fn: String): DSRASourceSerialized = new DSRASourceSerialized(fn)
5 | }
6 |
7 | class DSRASourceSerialized(val fn: String) extends DSRASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/nsra/NSRASourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.nsra
2 |
3 | object NSRASourceSerialized {
4 | def apply(fn: String): NSRASourceSerialized = new NSRASourceSerialized(fn)
5 | }
6 |
7 | class NSRASourceSerialized(val fn: String) extends NSRASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | object SDFASourceSerialized {
4 | def apply(fn: String): SDFASourceSerialized = new SDFASourceSerialized(fn)
5 | }
6 |
7 | class SDFASourceSerialized(val fn: String) extends SDFASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/snfa/SNFASourceFromSRE.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.snfa
2 |
3 | object SNFASourceFromSRE {
4 | def apply(sreFile: String): SNFASourceFromSRE = new SNFASourceFromSRE(sreFile)
5 | }
6 |
7 | class SNFASourceFromSRE(val sreFile: String) extends SNFASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/snfa/SNFASourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.snfa
2 |
3 | object SNFASourceSerialized {
4 | def apply(fn: String): SNFASourceSerialized = new SNFASourceSerialized(fn)
5 | }
6 |
7 | class SNFASourceSerialized(val fn: String) extends SNFASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spsa/SPSASourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spsa
2 |
3 | object SPSASourceSerialized {
4 | def apply(fn: String): SPSASourceSerialized = new SPSASourceSerialized(fn)
5 | }
6 |
7 | class SPSASourceSerialized(val fn: String) extends SPSASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spst/SPSTSourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spst
2 |
3 | object SPSTSourceSerialized {
4 | def apply(fn: String): SPSTSourceSerialized = new SPSTSourceSerialized(fn)
5 | }
6 |
7 | class SPSTSourceSerialized(val fn: String) extends SPSTSource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/nsra/NSRASourceFromSREM.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.nsra
2 |
3 | object NSRASourceFromSREM {
4 | def apply(sremFile: String): NSRASourceFromSREM = new NSRASourceFromSREM(sremFile)
5 | }
6 |
7 | class NSRASourceFromSREM(val sremFile: String) extends NSRASource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spstm/SPSTmSourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spstm
2 |
3 | object SPSTmSourceSerialized {
4 | def apply(fn: String): SPSTmSourceSerialized = new SPSTmSourceSerialized(fn)
5 | }
6 |
7 | class SPSTmSourceSerialized(val fn: String) extends SPSTmSource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
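
The *SourceSerialized classes above all follow the same idiom: the class stores the path of a serialized model, and a companion object's apply mirrors the constructor so call sites can drop new. For example (the path is illustrative):

import workflow.provider.source.sdfa.SDFASourceSerialized

object SerializedSourceDemo {
  // The two forms are equivalent; apply simply forwards to the constructor.
  val viaApply = SDFASourceSerialized("results/model.sdfa")
  val viaNew   = new SDFASourceSerialized("results/model.sdfa")
}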
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10")
2 |
3 | addSbtPlugin("com.scalapenos" % "sbt-prompt" % "1.0.2")
4 |
5 | addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13")
6 |
7 | addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")
8 |
9 | addSbtPlugin("io.get-coursier" % "sbt-coursier" % "2.0.0-RC5-3")
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/hmm/HMMSourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.hmm
2 |
3 | import estimator.HMMEstimator.IsoHMM
4 |
5 | object HMMSourceDirect {
6 | def apply(hmms: List[IsoHMM]): HMMSourceDirect = new HMMSourceDirect(hmms)
7 | }
8 |
9 | class HMMSourceDirect(val hmms: List[IsoHMM]) extends HMMSource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/matrix/MCSourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.matrix
2 |
3 | import model.markov.MarkovChain
4 |
5 | object MCSourceDirect {
6 | def apply(mcs: List[MarkovChain]): MCSourceDirect = new MCSourceDirect(mcs)
7 | }
8 |
9 | class MCSourceDirect(val mcs: List[MarkovChain]) extends MatrixSource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/forecaster/ForecasterSourceSerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.forecaster
2 |
3 | object ForecasterSourceSerialized {
4 | def apply(fn: String): ForecasterSourceSerialized = new ForecasterSourceSerialized(fn)
5 | }
6 |
7 | class ForecasterSourceSerialized(val fn: String) extends ForecasterSource {
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/SFAGuard.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa
2 |
3 | import fsm.symbolic.Guard
4 | import fsm.symbolic.logic.Sentence
5 |
6 | object SFAGuard {
7 | def apply(sentence: Sentence): SFAGuard = new SFAGuard(sentence)
8 | }
9 |
10 | class SFAGuard private[sfa](sentence: Sentence) extends Guard(sentence = sentence) with Serializable {
11 |
12 | }
13 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dsra/DSRASourceDirectI.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dsra
2 |
3 | import fsm.DSRAInterface
4 |
5 | object DSRASourceDirectI {
6 | def apply(dsrai: List[DSRAInterface]): DSRASourceDirectI = new DSRASourceDirectI(dsrai)
7 | }
8 |
9 | class DSRASourceDirectI(val dsrai: List[DSRAInterface]) extends DSRASource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spsa/SPSASourceDirectI.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spsa
2 |
3 | import fsm.SPSAInterface
4 |
5 | object SPSASourceDirectI {
6 | def apply(spsai: List[SPSAInterface]): SPSASourceDirectI = new SPSASourceDirectI(spsai)
7 | }
8 |
9 | class SPSASourceDirectI(val spsai: List[SPSAInterface]) extends SPSASource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spst/SPSTSourceDirectI.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spst
2 |
3 | import fsm.SPSTInterface
4 |
5 | object SPSTSourceDirectI {
6 | def apply(spsti: List[SPSTInterface]): SPSTSourceDirectI = new SPSTSourceDirectI(spsti)
7 | }
8 |
9 | class SPSTSourceDirectI(val spsti: List[SPSTInterface]) extends SPSTSource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/source/EndOfStreamEvent.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | import stream.GenericEvent
4 |
5 | /**
6 |  * Special event indicating that there are no more events in the stream.
7 |  * Useful for knowing when to shut down the forecasting engine.
8 | */
9 | final class EndOfStreamEvent extends GenericEvent(-1, "EndOfStream", 0, Map.empty)
10 |
--------------------------------------------------------------------------------
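
As the comment above notes, the marker exists so that downstream components can shut down cleanly. A listener sketch (not part of the repository) tying it to the Shutdownable trait from utils:

import stream.GenericEvent
import stream.source.{EndOfStreamEvent, StreamListener}
import utils.Shutdownable

// Shuts the given component down when the end-of-stream marker arrives.
class ShutdownOnEnd(target: Shutdownable) extends StreamListener {
  override def newEventEmitted(event: GenericEvent): Unit = event match {
    case _: EndOfStreamEvent => target.shutdown()
    case _                   => () // ordinary events are handled by other listeners
  }
}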
/cef/src/main/scala/workflow/provider/source/psa/PSASourceLearner.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.psa
2 |
3 | import stream.array.EventStream
4 |
5 | object PSASourceLearner {
6 | def apply(trainStream: EventStream): PSASourceLearner = new PSASourceLearner(trainStream)
7 | }
8 |
9 | class PSASourceLearner(val trainStream: EventStream) extends PSASource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/rt/RTSourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.rt
2 |
3 | import estimator.RemainingTimeEstimator.RemainingTimes
4 |
5 | object RTSourceDirect {
6 | def apply(rts: List[RemainingTimes]): RTSourceDirect = new RTSourceDirect(rts)
7 | }
8 |
9 | class RTSourceDirect(val rts: List[RemainingTimes]) extends RTSource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASourceDirectI.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | import fsm.SDFAInterface
4 |
5 | object SDFASourceDirectI {
6 | def apply(sdfai: List[SDFAInterface]): SDFASourceDirectI = new SDFASourceDirectI(sdfai)
7 |
8 | }
9 |
10 | class SDFASourceDirectI(val sdfai: List[SDFAInterface]) extends SDFASource {
11 |
12 | }
13 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/WindowType.scala:
--------------------------------------------------------------------------------
1 | package fsm
2 |
3 | object WindowType extends Enumeration {
4 | type WindowType = Value
5 | val COUNT, TIME = Value
6 |
7 | def str2Wt(string: String): WindowType = string match {
8 | case "count" => COUNT
9 | case "time" => TIME
10 | case _ => throw new IllegalArgumentException("Unrecognized window type: " + string)
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
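
str2Wt accepts exactly the lowercase strings used in pattern files, e.g. {windowType:time} in patterns/homes/reg1.sre, and fails fast on anything else:

import fsm.WindowType

object WindowTypeDemo extends App {
  assert(WindowType.str2Wt("count") == WindowType.COUNT)
  assert(WindowType.str2Wt("time") == WindowType.TIME)
  // WindowType.str2Wt("sliding") would throw IllegalArgumentException.
}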
/cef/src/main/scala/workflow/provider/source/spstm/SPSTmSourceDirectI.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spstm
2 |
3 | import fsm.SPSTmInterface
4 |
5 | object SPSTmSourceDirectI {
6 | def apply(spstmi: List[SPSTmInterface]): SPSTmSourceDirectI = new SPSTmSourceDirectI(spstmi)
7 | }
8 |
9 | class SPSTmSourceDirectI(val spstmi: List[SPSTmInterface]) extends SPSTmSource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/wt/WtSourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.wt
2 |
3 | import model.waitingTime.WtDistribution
4 |
5 | object WtSourceDirect {
6 | def apply(wtds: List[Map[Int, WtDistribution]]): WtSourceDirect = new WtSourceDirect(wtds)
7 | }
8 |
9 | class WtSourceDirect(val wtds: List[Map[Int, WtDistribution]]) extends WtSource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/profiler/StatsEstimator.scala:
--------------------------------------------------------------------------------
1 | package profiler
2 |
3 | abstract class StatsEstimator {
4 | def printProfileInfo(): Unit
5 | def printProfileInfo(fn: String): Unit
6 | def printPerStateProfileInfo(): Unit
7 | def printPerStateProfileInfo(fn: String): Unit
8 | def getStat(which: String): String
9 | def estimateStats(): Unit
10 | def estimatePerStateStats(): Unit
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/CountPolicy.scala:
--------------------------------------------------------------------------------
1 | package fsm
2 |
3 | object CountPolicy extends Enumeration {
4 | type CountPolicy = Value
5 | val OVERLAP, NONOVERLAP = Value
6 | def str2Pol(string: String): CountPolicy = string match {
7 | case "overlap" => OVERLAP
8 | case "nonoverlap" => NONOVERLAP
9 | case _ => throw new IllegalArgumentException("Unrecognized policy: " + string)
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/runtime/RunPrototype.scala:
--------------------------------------------------------------------------------
1 | package fsm.runtime
2 |
3 | /**
4 | * Each run prototype must implement this trait.
5 | */
6 | trait RunPrototype {
7 | /**
8 | * A run prototype must be able to clone itself.
9 | * @return A new run.
10 | */
11 | def cloneRun(id: Int): Run
12 |
13 | /**
14 | * @return The id of the run's FSM.
15 | */
16 | def getFsmId: Int
17 | }
18 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/ProbModel.scala:
--------------------------------------------------------------------------------
1 | package model
2 |
3 | object ProbModel extends Enumeration {
4 | type ProbModel = Value
5 | val FMM, VMM = Value
6 |
7 | def string2ProbModel(str: String): ProbModel = {
8 | str match {
9 | case "fmm" => FMM
10 | case "vmm" => VMM
11 | case _ => throw new IllegalArgumentException("Probabilistic model not recognized " + str)
12 | }
13 | }
14 |
15 | }
16 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/condition/FileExistsCondition.scala:
--------------------------------------------------------------------------------
1 | package workflow.condition
2 |
3 | import java.nio.file.{Files, Paths}
4 |
5 | /**
6 | * Simple condition that checks whether there exists a file at the given path.
7 | *
8 | * @param fn The given path.
9 | */
10 | class FileExistsCondition(fn: String) extends Condition {
11 |
12 | override def check(): Boolean = Files.exists(Paths.get(fn))
13 |
14 | }
15 |
--------------------------------------------------------------------------------
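
Usage sketch (the path points at one of the demo patterns in this repository):

import workflow.condition.FileExistsCondition

object ConditionDemo extends App {
  val cond = new FileExistsCondition("patterns/demo/a_seq_b.sre")
  println(if (cond.check()) "pattern file found" else "pattern file missing")
}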
/cef/src/main/scala/fsm/symbolic/Guard.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic
2 |
3 | import fsm.symbolic.logic.{EpsilonSentence, Sentence}
4 | import stream.GenericEvent
5 |
6 | abstract class Guard (val sentence: Sentence) extends Serializable {
7 | def check(event: GenericEvent): Boolean = sentence.evaluate(event)
8 |
9 | def isEpsilon: Boolean = sentence.isInstanceOf[EpsilonSentence]
10 |
11 | def isSentence(s: Sentence): Boolean = s == sentence
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/matrix/MCSourceSPSA.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.matrix
2 |
3 | import workflow.provider.{FSMProvider, SPSAProvider}
4 |
5 | object MCSourceSPSA {
6 | def apply(fsmp: FSMProvider): MCSourceSPSA = {
7 | require(fsmp.isSPSA)
8 | new MCSourceSPSA(fsmp.wrappedProvider.asInstanceOf[SPSAProvider])
9 | }
10 | }
11 |
12 | class MCSourceSPSA(val spsa: SPSAProvider) extends MatrixSource {
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/forecaster/ForecasterSourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.forecaster
2 |
3 | import model.forecaster.ForecasterInterface
4 |
5 | object ForecasterSourceDirect {
6 | def apply(forecasters: List[ForecasterInterface]): ForecasterSourceDirect = new ForecasterSourceDirect(forecasters)
7 | }
8 |
9 | class ForecasterSourceDirect(val forecasters: List[ForecasterInterface]) extends ForecasterSource {
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/FSMModel.scala:
--------------------------------------------------------------------------------
1 | package fsm
2 |
3 | object FSMModel extends Enumeration {
4 | type FSMModel = Value
5 | val NSFA, DSFA, NSRA, DSRA = Value
6 |
7 | def string2FSMModel(str: String): FSMModel = {
8 | str match {
9 | case "nsfa" => NSFA
10 | case "dsfa" => DSFA
11 | case "nsra" => NSRA
12 | case "dsra" => DSRA
13 | case _ => throw new IllegalArgumentException("FSM model not recognized " + str)
14 | }
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/TransitionOutput.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic
2 |
3 | /**
4 | * Represents the "output" emitted by a transition.
5 | * Essentially marks a transition as being a TAKE one, meaning that the triggering event is part of the match, or as
6 | * being an IGNORE one, meaning that the event is irrelevant and must be skipped.
7 | */
8 | object TransitionOutput extends Enumeration {
9 | type TransitionOutput = Value
10 | val TAKE, IGNORE = Value
11 | }
12 |
--------------------------------------------------------------------------------
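
A sketch of how the two outputs affect match construction; this is illustrative only, not the engine's actual buffering code:

import fsm.symbolic.TransitionOutput
import fsm.symbolic.TransitionOutput.TransitionOutput

object MatchUpdate {
  // TAKE appends the triggering event to the partial match; IGNORE skips it.
  def updateMatch(partialMatch: List[String], eventId: String, out: TransitionOutput): List[String] =
    out match {
      case TransitionOutput.TAKE   => partialMatch :+ eventId
      case TransitionOutput.IGNORE => partialMatch
    }
}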
/cef/src/main/scala/fsm/symbolic/logic/EpsilonSentence.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic
2 |
3 | import fsm.symbolic.logic.predicates.EpsilonPredicate
4 |
5 | /**
6 | * A special class representing sentences for epsilon transitions.
7 | *
8 | * @param predicate The predicate which must be an epsilon predicate.
9 | */
10 | class EpsilonSentence(predicate: Predicate) extends AtomicSentence(predicate, Set.empty) {
11 | require(predicate.isInstanceOf[EpsilonPredicate])
12 | }
13 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/vmm/mapper/SymbolMapper.scala:
--------------------------------------------------------------------------------
1 | package model.vmm.mapper
2 |
3 | import model.vmm.Symbol
4 | import stream.GenericEvent
5 |
6 | /**
7 | * The function of this trait is to map events to symbols.
8 | * For symbolic automata, an isomorphism is used.
9 | * For symbolic automata with registers, each transition is mapped to a symbol.
10 | */
11 | trait SymbolMapper {
12 |
13 | def evaluate(event: GenericEvent): Symbol
14 |
15 | def getSymbols: List[Symbol]
16 |
17 | }
18 |
--------------------------------------------------------------------------------
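
A sketch of a mapper keyed on event type. It assumes Symbol wraps an Int and that GenericEvent exposes an eventType field; neither definition appears in this dump, so treat both as assumptions:

import model.vmm.Symbol
import model.vmm.mapper.SymbolMapper
import stream.GenericEvent

class EventTypeMapper(types: List[String]) extends SymbolMapper {
  private val index: Map[String, Int] = types.zipWithIndex.toMap
  // Map an event to the symbol of its event type (assumed field, see note above).
  override def evaluate(event: GenericEvent): Symbol = Symbol(index(event.eventType))
  override def getSymbols: List[Symbol] = index.valuesIterator.map(i => Symbol(i)).toList
}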
/cef/src/main/scala/model/forecaster/ForecasterInterface.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster
2 |
3 | import model.forecaster.ForecasterType.ForecasterType
4 | import model.forecaster.runtime.RelativeForecast
5 |
6 | trait ForecasterInterface {
7 | def getNewForecast(
8 | state: Int,
9 | timestamp: Long
10 | ): RelativeForecast
11 | def getStates: Set[Int]
12 | def getId: Int
13 | def getMaxSpread: Int
14 | def getType: ForecasterType
15 | }
16 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/runtime/RunListener.scala:
--------------------------------------------------------------------------------
1 | package fsm.runtime
2 |
3 | /**
4 | * Every class that must monitor a run should implement this trait.
5 | */
6 | trait RunListener {
7 | /**
8 | * Method to determine how a new event should be processed.
9 | * @param rm The message received from the run.
10 | */
11 | def newEventProcessed(rm: RunMessage): Unit
12 |
13 | /**
14 | * Method to determine what happens when we need to shutdown.
15 | */
16 | def shutdown(): Unit
17 | }
18 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/pst/PSTSourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.pst
2 |
3 | import model.vmm.mapper.{Isomorphism, SymbolMapper}
4 | import model.vmm.pst.PredictionSuffixTree
5 |
6 | object PSTSourceDirect {
7 |
8 | def apply(
9 | pst: List[(PredictionSuffixTree, SymbolMapper)]
10 |   ): PSTSourceDirect = new PSTSourceDirect(pst)
11 | }
12 |
13 | class PSTSourceDirect(val pst: List[(PredictionSuffixTree, SymbolMapper)]) extends PSTSource {
14 |
15 | }
16 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/IsEventTypeSentence.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic
2 |
3 | import fsm.symbolic.logic.predicates.IsEventTypePredicate
4 |
5 | /**
6 | * A special class representing sentences for transitions that simply check the event type.
7 | *
8 | * @param predicate The predicate which must be an event type predicate.
9 | */
10 | class IsEventTypeSentence(predicate: Predicate) extends AtomicSentence(predicate, Set.empty) {
11 | require(predicate.isInstanceOf[IsEventTypePredicate])
12 | }
13 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/AbstractProvider.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider
2 |
3 | import workflow.condition.Condition
4 |
5 | abstract class AbstractProvider(conditions: List[Condition]) {
6 |
7 | private val checks: List[Boolean] = conditions.map(x => x.check())
8 | /*private var conditions = List[Condition]()
9 |
10 | def addCondition(cond: Condition): Unit = {
11 | conditions = cond :: conditions
12 | }*/
13 |
14 | def check(): List[Boolean] = checks
15 |
16 | def provide(): Object
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dsra/DSRASourceFromSREM.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dsra
2 |
3 | object DSRASourceFromSREM {
4 | def apply(
5 | sreFile: String,
6 | declarations: String
7 | ): DSRASourceFromSREM = new DSRASourceFromSREM(sreFile, declarations)
8 | }
9 |
10 | class DSRASourceFromSREM(
11 | val sreFile: String,
12 | val declarations: String
13 | ) extends DSRASource {
14 |
15 | }
16 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASourceDFA.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | import fsm.classical.fa.dfa.DFA
4 | import model.vmm.mapper.Isomorphism
5 |
6 | object SDFASourceDFA {
7 | def apply(
8 | dfa: DFA,
9 | iso: Isomorphism
10 | ): SDFASourceDFA = new SDFASourceDFA(dfa, iso)
11 |
12 | }
13 |
14 | class SDFASourceDFA(
15 | val dfa: DFA,
16 | val iso: Isomorphism
17 | ) extends SDFASource {
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dfa/DFASourceFromSDFA.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dfa
2 |
3 | import fsm.symbolic.sfa.sdfa.SDFA
4 | import model.vmm.mapper.Isomorphism
5 |
6 | object DFASourceFromSDFA {
7 | def apply(
8 | sdfa: SDFA,
9 | iso: Isomorphism
10 | ): DFASourceFromSDFA = new DFASourceFromSDFA(sdfa, iso)
11 | }
12 |
13 | class DFASourceFromSDFA(
14 | val sdfa: SDFA,
15 | val iso: Isomorphism
16 | ) extends DFASource {
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/order/OrderSourceCrossVal.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.order
2 |
3 | import fsm.CountPolicy.CountPolicy
4 | import stream.source.StreamSource
5 |
6 | class OrderSourceCrossVal(
7 | val fsmType: String,
8 | val patternFile: String,
9 | val declarations: String,
10 | val streamSource: StreamSource,
11 | val policy: CountPolicy
12 | ) extends OrderSource {
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/matrix/MCSourceMLE.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.matrix
2 |
3 | import stream.source.StreamSource
4 | import workflow.provider.FSMProvider
5 |
6 | object MCSourceMLE {
7 | def apply(
8 | fsmp: FSMProvider,
9 | streamSource: StreamSource
10 | ): MCSourceMLE = new MCSourceMLE(fsmp, streamSource)
11 | }
12 |
13 | class MCSourceMLE(
14 | val fsmp: FSMProvider,
15 | val streamSource: StreamSource
16 | ) extends MatrixSource {
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/cef/src/main/scala/estimator/RunEstimator.scala:
--------------------------------------------------------------------------------
1 | package estimator
2 |
3 | import fsm.runtime.RunListener
4 |
5 | /**
6 | * Abstract class from which all other estimators should inherit. It is a RunListener, and all sub-classes should
7 | * implement the newEventProcessed and shutdown methods. The general idea is that you first feed an estimator with a
8 | * training stream to build an initial model (or some initial structures) and then call estimate to build
9 | * the final model.
10 | */
11 | abstract class RunEstimator extends RunListener {
12 |
13 | def estimate(): Unit
14 |
15 | }
16 |
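A minimal sketch of a concrete estimator, illustrating the train-then-estimate workflow described above. The newEventProcessed(rm: RunMessage) and shutdown() signatures are assumptions inferred from this comment and from how RunMessage is used elsewhere in the repository; consult RunListener for the actual interface.

import estimator.RunEstimator
import fsm.runtime.RunMessage

// Hypothetical estimator that counts events seen during training and
// freezes that count as its "model" when estimate() is called.
class CountingEstimator extends RunEstimator {
  private var seen: Long = 0L
  private var model: Option[Long] = None

  override def newEventProcessed(rm: RunMessage): Unit = seen += 1 // assumed signature
  override def shutdown(): Unit = ()
  override def estimate(): Unit = model = Some(seen)
}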
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sra/SRAGuard.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sra
2 |
3 | import fsm.symbolic.logic.Sentence
4 | import fsm.symbolic.{Guard, Valuation}
5 | import stream.GenericEvent
6 |
7 | object SRAGuard {
8 | def apply(sentence: Sentence): SRAGuard = new SRAGuard(sentence)
9 | }
10 |
11 | class SRAGuard private[sra](override val sentence: Sentence) extends Guard(sentence = sentence) with Serializable {
12 |
13 | def check(
14 | event: GenericEvent,
15 | valuation: Valuation
16 | ): Boolean = sentence.evaluate(event, valuation)
17 |
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/forecaster/ForecasterSourceRandom.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.forecaster
2 |
3 | import workflow.provider.FSMProvider
4 |
5 | object ForecasterSourceRandom {
6 | def apply(
7 | fsmp: FSMProvider,
8 | horizon: Int
9 | ): ForecasterSourceRandom = new ForecasterSourceRandom(fsmp, horizon)
10 | }
11 |
12 | class ForecasterSourceRandom(
13 | val fsmp: FSMProvider,
14 | val horizon: Int
15 | ) extends ForecasterSource {
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/matrix/MCSourceProbs.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.matrix
2 |
3 | import model.markov.TransitionProbs
4 | import workflow.provider.FSMProvider
5 |
6 | object MCSourceProbs {
7 | def apply(
8 | fsmp: FSMProvider,
9 | probs: TransitionProbs
10 | ): MCSourceProbs = new MCSourceProbs(
11 | fsmp,
12 | probs
13 | )
14 | }
15 | class MCSourceProbs(
16 | val fsmp: FSMProvider,
17 | val probs: TransitionProbs
18 | ) extends MatrixSource {
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/rt/RTSourceEstimator.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.rt
2 |
3 | import stream.source.StreamSource
4 | import workflow.provider.FSMProvider
5 |
6 | object RTSourceEstimator {
7 | def apply(
8 | fsmp: FSMProvider,
9 | streamSource: StreamSource
10 | ): RTSourceEstimator = new RTSourceEstimator(fsmp, streamSource)
11 | }
12 |
13 | class RTSourceEstimator(
14 | val fsmp: FSMProvider,
15 | val streamSource: StreamSource
16 | ) extends RTSource {
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/hmm/HMMSourceEstimator.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.hmm
2 |
3 | import stream.source.StreamSource
4 | import workflow.provider.FSMProvider
5 |
6 | object HMMSourceEstimator {
7 | def apply(
8 | fsmp: FSMProvider,
9 | streamSource: StreamSource
10 | ): HMMSourceEstimator = new HMMSourceEstimator(fsmp, streamSource)
11 | }
12 |
13 | class HMMSourceEstimator(
14 | val fsmp: FSMProvider,
15 | val streamSource: StreamSource
16 | ) extends HMMSource {
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/TruePredicate.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 |
7 | /**
8 | * Implementation of the true predicate, a predicate for transitions that are always triggered, for every event.
9 | */
10 | case class TruePredicate(override val arguments: List[String]) extends Predicate(arguments) {
11 | override def evaluate(
12 | event: GenericEvent,
13 | valuation: Valuation
14 | ): Boolean = true
15 | }
16 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dsra/DSRASourceRegExp.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dsra
2 |
3 | import fsm.classical.pattern.regexp.RegExpTree
4 |
5 | object DSRASourceRegExp {
6 | def apply(
7 | re: RegExpTree,
8 | partitionAttribute: String,
9 | window: Int
10 | ): DSRASourceRegExp = new DSRASourceRegExp(re, partitionAttribute, window)
11 | }
12 |
13 | class DSRASourceRegExp(
14 | val re: RegExpTree,
15 | val partitionAttribute: String,
16 | val window: Int
17 | ) extends DSRASource {
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/nsra/NSRASourceRegExp.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.nsra
2 |
3 | import fsm.classical.pattern.regexp.RegExpTree
4 |
5 | object NSRASourceRegExp {
6 | def apply(
7 | re: RegExpTree,
8 | partitionAttribute: String,
9 | window: Int
10 | ): NSRASourceRegExp = new NSRASourceRegExp(re, partitionAttribute, window)
11 | }
12 |
13 | class NSRASourceRegExp(
14 | val re: RegExpTree,
15 | val partitionAttribute: String,
16 | val window: Int
17 | ) extends NSRASource {
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/snfa/SNFASourceRegExp.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.snfa
2 |
3 | import fsm.classical.pattern.regexp.RegExpTree
4 |
5 | object SNFASourceRegExp {
6 | def apply(
7 | re: RegExpTree,
8 | partitionAttribute: String,
9 | window: Int
10 | ): SNFASourceRegExp = new SNFASourceRegExp(re, partitionAttribute, window)
11 | }
12 |
13 | class SNFASourceRegExp(
14 | val re: RegExpTree,
15 | val partitionAttribute: String,
16 | val window: Int
17 | ) extends SNFASource {
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/TrueSentence.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic
2 |
3 | import fsm.symbolic.logic.predicates.TruePredicate
4 |
5 | /**
6 | * A special class representing sentences for true transitions, i.e., transitions that are triggered for every event.
7 | * True transitions are not the same as epsilon transitions. Epsilon transitions can be followed even without an event.
8 | * True transitions must consume an event.
9 | *
10 | * @param predicate The predicate which must be a True predicate.
11 | */
12 | class TrueSentence(predicate: Predicate) extends AtomicSentence(predicate, Set.empty) {
13 | require(predicate.isInstanceOf[TruePredicate])
14 | }
15 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/condition/Condition.scala:
--------------------------------------------------------------------------------
1 | package workflow.condition
2 |
3 | /**
4 | * Providers may need to have some conditions checked, e.g., that an SRE file with patterns exists.
5 | * Some of these conditions may be very similar. In order to avoid code duplication, we can create conditions with the
6 | * checking code by extending this trait.
7 | */
8 | trait Condition {
9 | /**
10 | * Every condition should implement this function. If it returns true (e.g., a file does indeed exist for a given
11 | * path), it means the condition is satisfied.
12 | *
13 | * @return True if the condition is satisfied, false otherwise.
14 | */
15 | def check(): Boolean
16 | }
17 |
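A minimal sketch of the file-existence check mentioned above; FileExistsCondition is a hypothetical name, not a class in this repository.

import java.nio.file.{Files, Paths}
import workflow.condition.Condition

// Satisfied iff a file exists at the given path, e.g., an SRE pattern file.
class FileExistsCondition(path: String) extends Condition {
  override def check(): Boolean = Files.exists(Paths.get(path))
}

val ok = new FileExistsCondition("patterns/demo/a_seq_b.sre").check()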
--------------------------------------------------------------------------------
/cef/src/main/scala/utils/SerializationUtils.scala:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import java.io.{FileOutputStream, ObjectOutputStream}
4 |
5 | /**
6 | * Utils for serializing objects.
7 | */
8 | object SerializationUtils {
9 | /**
10 | * Serializes a list of objects and writes them to a file.
11 | *
12 | * @param l The list of objects.
13 | * @param fn The path to the file.
14 | * @tparam T The type of objects.
15 | */
16 | def write2File[T](
17 | l: List[T],
18 | fn: String
19 | ): Unit = {
20 | val oos = new ObjectOutputStream(new FileOutputStream(fn))
21 | oos.writeObject(l)
22 | oos.close()
23 | }
24 | }
25 |
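Usage sketch. The readback half is not provided by this object, so the ObjectInputStream code below is only an assumption about how callers would restore the list.

import java.io.{FileInputStream, ObjectInputStream}
import utils.SerializationUtils

// Write a small list of (serializable) objects to a file.
SerializationUtils.write2File(List("a", "b", "c"), "/tmp/objects.ser")

// Readback: deserialize and cast back to the original list type.
val ois = new ObjectInputStream(new FileInputStream("/tmp/objects.ser"))
val restored = ois.readObject().asInstanceOf[List[String]]
ois.close()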
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/array/PSAStream.scala:
--------------------------------------------------------------------------------
1 | package stream.array
2 |
3 | import model.vmm.pst.psa.ProbSuffixAutomaton
4 |
5 | /**
6 | * Creates a random event stream from a probabilistic suffix automaton.
7 | *
8 | * @param psa The probabilistic suffix automaton to act as event generator.
9 | * @param size The size of the stream.
10 | */
11 | class PSAStream private[stream] (
12 | psa: ProbSuffixAutomaton,
13 | size: Int
14 | ) extends EventStreamI {
15 |
16 | override def generateStream(): EventStream = {
17 | val (eventStream, _) = psa.generateStream(size)
18 | eventStream
19 | }
20 |
21 | }
22 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/pattern/regexp/SymbolNode.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical.pattern.regexp
2 |
3 | object SymbolNode {
4 | def apply(symbol: String): SymbolNode = new SymbolNode(symbol, None)
5 |
6 | def apply(symbol: String, writeRegister: String): SymbolNode = new SymbolNode(symbol, Option(writeRegister))
7 | }
8 |
9 | /**
10 | * Each leaf of the tree is a terminal symbol.
11 | * @param symbol The node's symbol, as a string.
12 | * @param writeRegister The register, if any, to which the matched event is written.
13 | */
14 | case class SymbolNode(
15 | symbol: String,
16 | writeRegister: Option[String]
17 | ) extends RegExpTree {
18 | override def toString: String = "Symbol:" + symbol + "-WriteReg:" + writeRegister
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/wt/WtSourceRT.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.wt
2 |
3 | import workflow.provider.{FSMProvider, RemainingTimesProvider}
4 |
5 | object WtSourceRT {
6 | def apply(
7 | fsmp: FSMProvider,
8 | rtps: RemainingTimesProvider,
9 | horizon: Int,
10 | finalsEnabled: Boolean
11 | ): WtSourceRT = new WtSourceRT(fsmp, rtps, horizon, finalsEnabled)
12 | }
13 |
14 | class WtSourceRT(
15 | val fsmp: FSMProvider,
16 | val rtps: RemainingTimesProvider,
17 | val horizon: Int,
18 | val finalsEnabled: Boolean
19 | ) extends WtSource {
20 | require(horizon > 0)
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/wt/WtSourceMatrix.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.wt
2 |
3 | import workflow.provider.{FSMProvider, MarkovChainProvider}
4 |
5 | object WtSourceMatrix {
6 | def apply(
7 | fsmp: FSMProvider,
8 | mcps: MarkovChainProvider,
9 | horizon: Int,
10 | finalsEnabled: Boolean
11 | ): WtSourceMatrix = new WtSourceMatrix(fsmp, mcps, horizon, finalsEnabled)
12 | }
13 |
14 | class WtSourceMatrix(
15 | val fsmp: FSMProvider,
16 | val mcps: MarkovChainProvider,
17 | val horizon: Int,
18 | val finalsEnabled: Boolean
19 | ) extends WtSource {
20 | require(horizon > 0)
21 | }
22 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | import fsm.symbolic.sfa.sdfa.SDFA
4 |
5 | object SDFASourceDirect {
6 | def apply(
7 | sdfa: List[SDFA],
8 | partitionAttributes: List[String]
9 | ): SDFASourceDirect = new SDFASourceDirect(sdfa, partitionAttributes)
10 |
11 | def apply(sdfa: List[SDFA]): SDFASourceDirect = new SDFASourceDirect(sdfa, List.empty)
12 | }
13 |
14 | class SDFASourceDirect(
15 | val sdfa: List[SDFA],
16 | val partitionAttributes: List[String]
17 | ) extends SDFASource {
18 | require(partitionAttributes.isEmpty || (sdfa.size == partitionAttributes.size))
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spsa/SPSASourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spsa
2 |
3 | import model.vmm.pst.spsa.SymbolicPSA
4 |
5 | object SPSASourceDirect {
6 | def apply(
7 | spsa: List[SymbolicPSA],
8 | partitionAttributes: List[String]
9 | ): SPSASourceDirect = new SPSASourceDirect(spsa, partitionAttributes)
10 |
11 | def apply(spsa: List[SymbolicPSA]): SPSASourceDirect = new SPSASourceDirect(spsa, List.empty)
12 | }
13 |
14 | class SPSASourceDirect(
15 | val spsa: List[SymbolicPSA],
16 | val partitionAttributes: List[String]
17 | ) extends SPSASource {
18 | require(partitionAttributes.isEmpty || (spsa.size == partitionAttributes.size))
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/EQStr.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | case class EQStr(override val arguments: List[String]) extends Predicate(arguments) {
9 | override def evaluate(
10 | event: GenericEvent,
11 | valuation: Valuation
12 | ): Boolean = {
13 | require(arguments.size == 2)
14 | val variableValue = event.getValueOf(arguments.head).toString
15 | val constant = arguments(1)
16 | variableValue == constant
17 | }
18 |
19 | override def toString: String = "EQStr(" + list2Str(arguments, ",") + ")"
20 |
21 | }
22 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dfa/DFASourceFromXML.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dfa
2 |
3 | import fsm.CountPolicy.CountPolicy
4 |
5 | object DFASourceFromXML {
6 |
7 | def apply(
8 | xmlFile: String,
9 | policy: CountPolicy,
10 | order: Int,
11 | streamSymbols: Set[String]
12 | ): DFASourceFromXML = new DFASourceFromXML(
13 | xmlFile,
14 | policy,
15 | order,
16 | streamSymbols
17 | )
18 | }
19 |
20 | class DFASourceFromXML(
21 | val xmlFile: String,
22 | val policy: CountPolicy,
23 | val order: Int,
24 | val streamSymbols: Set[String]
25 | ) extends DFASource {
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/forecaster/ForecasterNextSourceBuild.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.forecaster
2 |
3 | import workflow.provider.{FSMProvider, MarkovChainProvider}
4 |
5 | object ForecasterNextSourceBuild {
6 | def apply(
7 | fsmp: FSMProvider,
8 | mcp: MarkovChainProvider,
9 | confidenceThreshold: Double
10 | ): ForecasterNextSourceBuild = new ForecasterNextSourceBuild(fsmp, mcp, confidenceThreshold)
11 | }
12 |
13 | class ForecasterNextSourceBuild(
14 | val fsmp: FSMProvider,
15 | val mcp: MarkovChainProvider,
16 | val confidenceThreshold: Double
17 | ) extends ForecasterSource {
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dsra/DSRASourceDirect.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dsra
2 |
3 | import fsm.symbolic.sra.dsra.DSRAStreaming
4 |
5 | object DSRASourceDirect {
6 | def apply(
7 | dsra: List[DSRAStreaming],
8 | partitionAttributes: List[String]
9 | ): DSRASourceDirect = new DSRASourceDirect(dsra, partitionAttributes)
10 |
11 | def apply(dsra: List[DSRAStreaming]): DSRASourceDirect = new DSRASourceDirect(dsra, List.empty)
12 | }
13 |
14 | class DSRASourceDirect(
15 | val dsra: List[DSRAStreaming],
16 | val partitionAttributes: List[String]
17 | ) extends DSRASource {
18 | require(partitionAttributes.isEmpty || (dsra.size == partitionAttributes.size))
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/pattern/archived/Reader.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical.pattern.archived
2 |
3 | import scala.collection.mutable.Set
4 |
5 | class Reader() {
6 |
7 | def getModules(
8 | pattern: String,
9 | inputSymbols: Set[String]
10 | ): List[String] = {
11 | require(!inputSymbols.contains("#"))
12 | require(pattern.nonEmpty)
13 | var mods = List.empty[String]
14 | var mod = ""
15 | for (c <- pattern) {
16 | if (c == '#') {
17 | mods = mod :: mods
18 | mod = ""
19 | } else {
20 | if (!inputSymbols.contains(c.toString))
21 | throw new IllegalArgumentException("Pattern contains symbol not found in inputSymbols")
22 | mod += c
23 | }
24 | }
25 | mods = mod :: mods
26 | mods.reverse
27 | }
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/patterns/maritime/port/declarationsDistance1.sre:
--------------------------------------------------------------------------------
1 | +(DistanceBetweenPredicate(-4.47530,48.38273,5.0,6.0),DistanceBetweenPredicate(-4.47530,48.38273,6.0,7.0),DistanceBetweenPredicate(-4.47530,48.38273,7.0,8.0),DistanceBetweenPredicate(-4.47530,48.38273,8.0,9.0),DistanceBetweenPredicate(-4.47530,48.38273,9.0,10.0)),
2 | +(IsEventTypePredicate(RESET)),
3 | ~(IsEventTypePredicate(RESET),WithinCirclePredicate(-4.47530,48.38273,5.0),DistanceBetweenPredicate(-4.47530,48.38273,5.0,6.0),DistanceBetweenPredicate(-4.47530,48.38273,6.0,7.0),DistanceBetweenPredicate(-4.47530,48.38273,7.0,8.0),DistanceBetweenPredicate(-4.47530,48.38273,8.0,9.0),DistanceBetweenPredicate(-4.47530,48.38273,9.0,10.0)),
4 | ~(IsEventTypePredicate(RESET),OutsideCirclePredicate(-4.47530,48.38273,5.0)),
5 | ~(WithinCirclePredicate(-4.47530,48.38273,5.0),OutsideCirclePredicate(-4.47530,48.38273,5.0))
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/source/JsonLineParser.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | import play.api.libs.json.{JsObject, Json}
4 | import stream.GenericEvent
5 |
6 | object JsonLineParser extends LineParser {
7 |
8 | override def line2Event(
9 | line: String,
10 | id: Int
11 | ): GenericEvent = {
12 | val map = Json.parse(line).as[JsObject].value.toMap
13 | val timestamp = map.getOrElse("timestamp", id).toString.toLong
14 | GenericEvent(id, "GenericJson", timestamp, map)
15 | }
16 |
17 | override def line2Event(
18 | line: Seq[String],
19 | id: Int
20 | ): GenericEvent = throw new UnsupportedOperationException("Json domain does not have columns for each line")
21 |
22 | }
23 |
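Usage sketch, assuming the timestamp is a plain JSON number (a quoted string would make the toLong conversion fail):

import stream.source.JsonLineParser

val event = JsonLineParser.line2Event("""{"timestamp": 42, "lon": -4.47, "lat": 48.38}""", id = 1)
// event.eventType == "GenericJson", event.timestamp == 42L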
--------------------------------------------------------------------------------
/cef/src/main/scala/db/DetectionsTable.scala:
--------------------------------------------------------------------------------
1 | package db
2 |
3 | import slick.jdbc.PostgresProfile.api._
4 | import ui.ConfigUtils
5 |
6 | /**
7 | * Table to store detected complex events.
8 | * For each complex event, we store its timestamp, the value of its partition attribute, the (final) state the
9 | * automaton was in when the event was detected and the input events that led to it.
10 | * @param tag The Slick tag identifying this table instance.
11 | */
12 | class DetectionsTable(tag: Tag) extends Table[(Int, Long, String, Int, String)](tag, Some(ConfigUtils.detectionsSchema), ConfigUtils.detectionsTable) {
13 | def id = column[Int]("DET_ID", O.PrimaryKey) // This is the primary key column
14 | def ts = column[Long]("ts")
15 | def attr = column[String]("partitionval")
16 | def state = column[Int]("state")
17 | def events = column[String]("events")
18 |
19 | def * = (id, ts, attr, state, events)
20 | }
21 |
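A hedged sketch of typical Slick usage for this table; the row values below are made up, and db stands for an already configured Database instance.

import slick.jdbc.PostgresProfile.api._

val detections = TableQuery[DetectionsTable]
// One detected complex event: id, timestamp, partition value, state, events.
val insert = detections += ((1, 1000L, "vessel42", 5, "e1,e2,e3"))
// db.run(insert) would execute the action.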
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spsa/SPSASourcePSASerialized.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spsa
2 |
3 | import fsm.CountPolicy.CountPolicy
4 | import workflow.provider.PSAProvider
5 |
6 | object SPSASourcePSASerialized {
7 | def apply(
8 | patternFile: String,
9 | declarationsFile: String,
10 | psap: PSAProvider,
11 | policy: CountPolicy
12 | ): SPSASourcePSASerialized = new SPSASourcePSASerialized(patternFile, declarationsFile, psap, policy)
13 | }
14 |
15 | class SPSASourcePSASerialized(
16 | val patternFile: String,
17 | val declarationsFile: String,
18 | val psap: PSAProvider,
19 | val policy: CountPolicy
20 | ) extends SPSASource {
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/waitingTime/ForecastMethod.scala:
--------------------------------------------------------------------------------
1 | package model.waitingTime
2 |
3 | object ForecastMethod extends Enumeration {
4 | type ForecastMethod = Value
5 | val ARGMAX, FULLSCAN, SMARTSCAN, FIXEDSPREAD, CLASSIFY_NEXTK, CLASSIFY_WIN = Value
6 |
7 | def string2method(str: String): ForecastMethod = {
8 | str match {
9 | case "argmax" => ARGMAX
10 | case "full-scan" => FULLSCAN
11 | case "smart-scan" => SMARTSCAN
12 | case "fixed-spread" => FIXEDSPREAD
13 | case "classify-nextk" => CLASSIFY_NEXTK
14 | case "classify-win" => CLASSIFY_WIN
15 | case _ => throw new IllegalArgumentException("Forecast method not recognized " + str)
16 | }
17 | }
18 |
19 | def isClassification(fm: ForecastMethod): Boolean = fm == CLASSIFY_NEXTK || fm == CLASSIFY_WIN
20 |
21 | def isRegression(fm: ForecastMethod): Boolean = !isClassification(fm)
22 | }
23 |
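For example:

import model.waitingTime.ForecastMethod

val method = ForecastMethod.string2method("smart-scan") // SMARTSCAN
ForecastMethod.isClassification(method)                 // false
ForecastMethod.isRegression(method)                     // true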
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/IsEventTypePredicate.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | /**
9 | * A predicate that checks the type of the event and evaluates to true if it is equal to the given event type.
10 | * @param arguments 0 is the given event type
11 | */
12 | case class IsEventTypePredicate(override val arguments: List[String]) extends Predicate(arguments) {
13 | require(arguments.size == 1)
14 | val givenType: String = arguments(0)
15 |
16 | override def evaluate(
17 | event: GenericEvent,
18 | valuation: Valuation
19 | ): Boolean = event.eventType == givenType
20 |
21 | override def toString: String = "IsEventTypePredicate(" + list2Str(arguments, ",") + ")"
22 | }
23 |
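Usage sketch; the three-argument GenericEvent constructor is taken from its use elsewhere in this repository.

import fsm.symbolic.logic.predicates.IsEventTypePredicate
import stream.GenericEvent

val isReset = IsEventTypePredicate(List("RESET"))
isReset.evaluate(GenericEvent(1, "RESET", 1000L)) // true
isReset.evaluate(GenericEvent(2, "AIS", 1001L))   // false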
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spsa/SPSASourceFromSRE.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spsa
2 |
3 | import fsm.CountPolicy.CountPolicy
4 | import stream.source.StreamSource
5 |
6 | object SPSASourceFromSRE {
7 | def apply(
8 | patternFile: String,
9 | declarationsFile: String,
10 | streamSource: StreamSource,
11 | policy: CountPolicy,
12 | maxNoStates: Int
13 | ): SPSASourceFromSRE = new SPSASourceFromSRE(patternFile, declarationsFile, streamSource, policy, maxNoStates)
14 | }
15 |
16 | class SPSASourceFromSRE(
17 | val patternFile: String,
18 | val declarationsFile: String,
19 | val streamSource: StreamSource,
20 | val policy: CountPolicy,
21 | val maxNoStates: Int
22 | ) extends SPSASource {
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/cef/src/main/scala/profiler/ForecastCollector.scala:
--------------------------------------------------------------------------------
1 | package profiler
2 |
3 | import fsm.runtime.RunMessage
4 | import model.forecaster.runtime.RelativeForecast
5 |
6 | /**
7 | * Collectors must inherit from this class.
8 | *
9 | * The main job of a forecast collector is to collect the produced forecasts and detections of its run. Each
10 | * predictor run internally creates a collector.
11 | */
12 | abstract class ForecastCollector {
13 | /**
14 | * The predictor run calls this method after every attempt to produce a forecast.
15 | *
16 | * @param rm The message sent from the FSM run to the predictor run.
17 | * @param forecast The forecast produced. Could be an empty one.
18 | */
19 | def collect(
20 | rm: RunMessage,
21 | forecast: RelativeForecast
22 | ): Unit
23 |
24 | /**
25 | * What happens when a RESET event arrives.
26 | */
27 | def reset(): Unit
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright (c) Elias Alevizos
2 |
3 | Wayeb comes with ABSOLUTELY NO WARRANTY.
4 |
5 | Wayeb follows a dual licensing scheme.
6 |
7 | For use by individuals,
8 | Wayeb is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.
9 | To view a copy of this license, visit https://creativecommons.org/licenses/by-nc-sa/4.0/
10 | or send a letter to Creative Commons, PO Box 1866, Mountain View, CA 94042, USA.
11 | This license is provided exclusively for research purposes.
12 | The results of any such research involving Wayeb must be made publicly available.
13 |
14 | For commercial/institutional/governmental use or any other use by private or public
15 | legal entities, sharing, modifying and distributing Wayeb or any derivatives of it
16 | in any form, such as source code, libraries and executables, requires the written
17 | permission of its author(s) (Elias Alevizos) and a possible request for licensing fees.
18 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/Predicate.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic
2 |
3 | import fsm.symbolic.Valuation
4 | import stream.GenericEvent
5 |
6 | /**
7 | * Abstract class for representing predicates. All custom predicates must extend this class and reside under
8 | * fsm.symbolic.logic.predicates.
9 | */
10 | abstract class Predicate(val arguments: List[String]) extends Serializable {
11 | /**
12 | * Each predicate must implement this method that evaluates it against an event and a valuation.
13 | *
14 | * @param event The event against which to evaluate the predicate.
15 | * @param valuation The valuation to be used, i.e., the register contents.
16 | * @return True if the predicate evaluates to true with the given event.
17 | */
18 | def evaluate(
19 | event: GenericEvent,
20 | valuation: Valuation
21 | ): Boolean
22 |
23 | def evaluate(event: GenericEvent): Boolean = evaluate(event, Valuation())
24 |
25 | }
26 |
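A sketch of a custom predicate following the same pattern as the predicates in this package; NonNegative is a hypothetical example, not part of the repository.

package fsm.symbolic.logic.predicates

import fsm.symbolic.Valuation
import fsm.symbolic.logic.Predicate
import stream.GenericEvent

// True iff the (Double) attribute named in arguments(0) is non-negative.
case class NonNegative(override val arguments: List[String]) extends Predicate(arguments) {
  override def evaluate(
      event: GenericEvent,
      valuation: Valuation
  ): Boolean = {
    require(arguments.size == 1)
    event.getValueOf(arguments.head).toString.toDouble >= 0.0
  }

  override def toString: String = "NonNegative(" + arguments.head + ")"
}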
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/EQAttr.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | case class EQAttr(override val arguments: List[String]) extends Predicate(arguments) {
9 | override def evaluate(
10 | event: GenericEvent,
11 | valuation: Valuation
12 | ): Boolean = {
13 | require(arguments.size == 2)
14 | if (valuation.hasRegister(arguments(1))) {
15 | val attributeValue: Double = event.getValueOf(arguments(0)).toString.toDouble
16 | val storedEvent = valuation.v(arguments(1))
17 | val registerValue: Double = storedEvent.getValueOf(arguments(0)).toString.toDouble
18 | attributeValue == registerValue
19 | }
20 | else false
21 | }
22 |
23 | override def toString: String = "EQAttr(" + list2Str(arguments, ",") + ")"
24 | }
25 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/EQAttrStr.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | case class EQAttrStr(override val arguments: List[String]) extends Predicate(arguments) {
9 | override def evaluate(
10 | event: GenericEvent,
11 | valuation: Valuation
12 | ): Boolean = {
13 | require(arguments.size == 2)
14 | if (valuation.hasRegister(arguments(1))) {
15 | val attributeValue: String = event.getValueOf(arguments(0)).toString
16 | val storedEvent = valuation.v(arguments(1))
17 | val registerValue: String = storedEvent.getValueOf(arguments(0)).toString
18 | attributeValue == registerValue
19 | }
20 | else false
21 | }
22 |
23 | override def toString: String = "EQAttrStr(" + list2Str(arguments, ",") + ")"
24 | }
25 |
26 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/GTAttr.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | case class GTAttr(override val arguments: List[String]) extends Predicate(arguments) {
9 | override def evaluate(
10 | event: GenericEvent,
11 | valuation: Valuation
12 | ): Boolean = {
13 | require(arguments.size == 2)
14 | if (valuation.hasRegister(arguments(1))) {
15 | val attributeValue: Double = event.getValueOf(arguments(0)).toString.toDouble
16 | val storedEvent = valuation.v(arguments(1))
17 | val registerValue: Double = storedEvent.getValueOf(arguments(0)).toString.toDouble
18 | attributeValue > registerValue
19 | }
20 | else false
21 | }
22 |
23 | override def toString: String = "GTAttr(" + list2Str(arguments, ",") + ")"
24 | }
25 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/wt/WtSourceSPST.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.wt
2 |
3 | import workflow.provider.SPSTProvider
4 |
5 | object WtSourceSPST {
6 | def apply(
7 | spstProvider: SPSTProvider,
8 | horizon: Int,
9 | cutoffThreshold: Double,
10 | distance: (Double, Double)
11 | ): WtSourceSPST = new WtSourceSPST(spstProvider, horizon, cutoffThreshold, distance)
12 |
13 | def apply(
14 | spstProvider: SPSTProvider,
15 | horizon: Int,
16 | cutoffThreshold: Double
17 | ): WtSourceSPST = new WtSourceSPST(spstProvider, horizon, cutoffThreshold, distance = (0.0, 1.0))
18 | }
19 |
20 | class WtSourceSPST(
21 | val spstProvider: SPSTProvider,
22 | val horizon: Int,
23 | val cutoffThreshold: Double,
24 | val distance: (Double, Double)
25 | ) extends WtSource {
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/LTAttr.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | case class LTAttr(override val arguments: List[String]) extends Predicate(arguments) {
9 |
10 | override def evaluate(
11 | event: GenericEvent,
12 | valuation: Valuation
13 | ): Boolean = {
14 | require(arguments.size == 2)
15 | if (valuation.hasRegister(arguments(1))) {
16 | val attributeValue: Double = event.getValueOf(arguments(0)).toString.toDouble
17 | val storedEvent = valuation.v(arguments(1))
18 | val registerValue: Double = storedEvent.getValueOf(arguments(0)).toString.toDouble
19 | attributeValue < registerValue
20 | }
21 | else false
22 | }
23 |
24 | override def toString: String = "LTAttr(" + list2Str(arguments, ",") + ")"
25 | }
26 |
--------------------------------------------------------------------------------
/cef/src/main/scala/profiler/ProfilerInterface.scala:
--------------------------------------------------------------------------------
1 | package profiler
2 |
3 | /**
4 | * Every profiler must implement this trait.
5 | */
6 | trait ProfilerInterface {
7 | /**
8 | * Prints all calculated statistics.
9 | */
10 | def printProfileInfo(): Unit
11 |
12 | /**
13 | * Prints all calculated statistics and also writes them to a csv file.
14 | *
15 | * @param fn The path to the file.
16 | */
17 | def printProfileInfo(fn: String): Unit
18 |
19 | /**
20 | * Prints all calculated statistics and also writes them to a csv file. Additionally, the prefix will be written in
21 | * the first column.
22 | *
23 | * @param prefix The prefix.
24 | * @param fn The path to the file.
25 | */
26 | def printProfileInfo(prefix: String, fn: String): Unit
27 |
28 | /**
29 | * Should be called after the stream has been consumed to estimate statistics and make them available for printing and
30 | * retrieval.
31 | */
32 | def estimateStats(): Unit
33 | }
34 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/EQ.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | /**
9 | * Checks if the value of a certain event attribute (must be Double) equals a given constant.
10 | * @param arguments 0 is the attribute name to be checked
11 | * 1 the given constant
12 | */
13 | case class EQ(override val arguments: List[String]) extends Predicate(arguments) {
14 | override def evaluate(
15 | event: GenericEvent,
16 | valuation: Valuation
17 | ): Boolean = {
18 | require(arguments.size == 2)
19 | val variableValue = event.getValueOf(arguments(0)).toString.toDouble
20 | val constant = arguments(1).toDouble
21 | variableValue == constant
22 | }
23 |
24 | override def toString: String = "EQ(" + list2Str(arguments, ",") + ")"
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/GT.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | /**
9 | * Checks if the value of a certain event attribute (must be Double) is above a given constant.
10 | * @param arguments 0 is the attribute name to be checked
11 | * 1 the given constant
12 | */
13 | case class GT(override val arguments: List[String]) extends Predicate(arguments) {
14 | override def evaluate(
15 | event: GenericEvent,
16 | valuation: Valuation
17 | ): Boolean = {
18 | require(arguments.size == 2)
19 | val variableValue = event.getValueOf(arguments(0)).toString.toDouble
20 | val constant = arguments(1).toDouble
21 | variableValue > constant
22 | }
23 |
24 | override def toString: String = "GT(" + list2Str(arguments, ",") + ")"
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/LT.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | /**
9 | * Checks if the value of a certain event attribute (must be Double) is below a given constant.
10 | * @param arguments 0 is the attribute name to be checked
11 | * 1 the given constant
12 | */
13 | case class LT(override val arguments: List[String]) extends Predicate(arguments) {
14 | override def evaluate(
15 | event: GenericEvent,
16 | valuation: Valuation
17 | ): Boolean = {
18 | require(arguments.size == 2)
19 | val variableValue = event.getValueOf(arguments(0)).toString.toDouble
20 | val constant = arguments(1).toDouble
21 | variableValue < constant
22 | }
23 |
24 | override def toString: String = "LT(" + list2Str(arguments, ",") + ")"
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/wt/WtSourceSPSTm.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.wt
2 |
3 | import workflow.provider.SPSTmProvider
4 |
5 | object WtSourceSPSTm {
6 | def apply(
7 | spstmProvider: SPSTmProvider,
8 | horizon: Int,
9 | cutoffThreshold: Double,
10 | distance: (Double, Double)
11 | ): WtSourceSPSTm = new WtSourceSPSTm(spstmProvider, horizon, cutoffThreshold, distance)
12 |
13 | def apply(
14 | spstmProvider: SPSTmProvider,
15 | horizon: Int,
16 | cutoffThreshold: Double
17 | ): WtSourceSPSTm = new WtSourceSPSTm(spstmProvider, horizon, cutoffThreshold, distance = (0.0, 1.0))
18 | }
19 |
20 | class WtSourceSPSTm(
21 | val spstmProvider: SPSTmProvider,
22 | val horizon: Int,
23 | val cutoffThreshold: Double,
24 | val distance: (Double, Double)
25 | ) extends WtSource {
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/GTE.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | /**
9 | * Checks if the value of a certain event attribute (must be Double) is above or equal to a given constant.
10 | * @param arguments 0 is the attribute name to be checked
11 | * 1 the given constant
12 | */
13 | case class GTE(override val arguments: List[String]) extends Predicate(arguments) {
14 | override def evaluate(
15 | event: GenericEvent,
16 | valuation: Valuation
17 | ): Boolean = {
18 | require(arguments.size == 2)
19 | val variableValue = event.getValueOf(arguments(0)).toString.toDouble
20 | val constant = arguments(1).toDouble
21 | variableValue >= constant
22 | }
23 |
24 | override def toString: String = "GTE(" + list2Str(arguments, ",") + ")"
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/LTE.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | /**
9 | * Checks if the value of a certain event attribute (must be Double) is below or equal to a given constant.
10 | * @param arguments 0 is the attribute name to be checked
11 | * 1 the given constant
12 | */
13 | case class LTE(override val arguments: List[String]) extends Predicate(arguments) {
14 | override def evaluate(
15 | event: GenericEvent,
16 | valuation: Valuation
17 | ): Boolean = {
18 | require(arguments.size == 2)
19 | val variableValue = event.getValueOf(arguments(0)).toString.toDouble
20 | val constant = arguments(1).toDouble
21 | variableValue <= constant
22 | }
23 |
24 | override def toString: String = "LTE(" + list2Str(arguments, ",") + ")"
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/OutsideCirclePredicate.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 | import utils.SpatialUtils.withinCircle
8 |
9 | case class OutsideCirclePredicate(override val arguments: List[String]) extends Predicate(arguments) {
10 | val centerLon: Double = arguments(0).toDouble
11 | val centerLat: Double = arguments(1).toDouble
12 | val radius: Double = arguments(2).toDouble
13 |
14 | override def evaluate(
15 | event: GenericEvent,
16 | valuation: Valuation
17 | ): Boolean = {
18 | val lon = event.getValueOf("lon").toString.toDouble
19 | val lat = event.getValueOf("lat").toString.toDouble
20 | !withinCircle(lon, lat, centerLon, centerLat, radius)
21 | }
22 |
23 | override def toString: String = "OutsideCirclePredicate(" + list2Str(arguments, ",") + ")"
24 | }
25 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/pattern/regexp/OperatorNode.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical.pattern.regexp
2 |
3 | import fsm.classical.pattern.regexp.OperatorType._
4 |
5 | /**
6 | * Each internal node of the tree is an operator.
7 | *
8 | * @param operator The operator type of the node, ITER, UNION or CONCAT.
9 | * @param children The sub-expressions of the node.
10 | */
11 | case class OperatorNode(
12 | operator: OperatorType,
13 | children: List[RegExpTree]
14 | ) extends RegExpTree {
15 | require(if (operator == ITER) children.size == 1 else children.size == 2, "ITER must have exactly one child; UNION and CONCAT exactly two.")
16 |
17 | override def toString: String = operator.toString + "(" + childrenAsString + ")"
18 |
19 | /**
20 | * @return The node's sub-expressions as a string.
21 | */
22 | private def childrenAsString: String = {
23 | if (operator == ITER) children.head.toString
24 | else children.head.toString + children(1).toString
25 | }
26 | }
27 |
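Construction sketch, assuming OperatorType exposes ITER, UNION and CONCAT as stated above:

import fsm.classical.pattern.regexp.{OperatorNode, SymbolNode}
import fsm.classical.pattern.regexp.OperatorType._

// The tree for "a followed by b, iterated": ITER(CONCAT(a, b)).
val ab = OperatorNode(CONCAT, List(SymbolNode("a"), SymbolNode("b")))
val tree = OperatorNode(ITER, List(ab))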
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/task/fsmTask/SNFATask.scala:
--------------------------------------------------------------------------------
1 | package workflow.task.fsmTask
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.SNFAInterface
5 | import workflow.provider.SNFAProvider
6 | import workflow.provider.source.snfa.SNFASourceFromSRE
7 | import workflow.task.Task
8 |
9 | object SNFATask {
10 | /**
11 | * Constructor for SNFA task.
12 | *
13 | * @param fn Path to file containing patterns
14 | * @return The SNFA task.
15 | */
16 | def apply(fn: String): SNFATask = new SNFATask(fn)
17 | }
18 |
19 | /**
20 | * Builds SNFA provider from a set of patterns.
21 | *
22 | * @param fn Path to file containing patterns
23 | */
24 | class SNFATask private (fn: String) extends Task with LazyLogging {
25 |
26 | override def execute(): List[SNFAInterface] = {
27 | logger.info("Executing SNFA task...")
28 | val snfap = SNFAProvider(new SNFASourceFromSRE(fn))
29 | val snfa = snfap.provide()
30 | logger.debug("SNFAs built.")
31 | logger.info("SNFA task done.")
32 | snfa
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/task/fsmTask/NSRATask.scala:
--------------------------------------------------------------------------------
1 | package workflow.task.fsmTask
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.NSRAInterface
5 | import workflow.provider.NSRAProvider
6 | import workflow.provider.source.nsra.NSRASourceFromSREM
7 | import workflow.task.Task
8 |
9 | object NSRATask {
10 | /**
11 | * Constructor for NSRA task.
12 | *
13 | * @param fn Path to file containing patterns.
14 | * @return The NSRA task.
15 | */
16 | def apply(fn: String): NSRATask = new NSRATask(fn)
17 | }
18 |
19 | /**
20 | * Builds NSRA provider from a set of patterns.
21 | *
22 | * @param fn Path to file containing patterns.
23 | */
24 | class NSRATask private (fn: String) extends Task with LazyLogging {
25 |
26 | override def execute(): List[NSRAInterface] = {
27 | logger.info("Executing NSRA task...")
28 | val nsrap = NSRAProvider(new NSRASourceFromSREM(fn))
29 | val nsra = nsrap.provide()
30 | logger.debug("NSRAs built.")
31 | logger.info("NSRA task done.")
32 | nsra
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/WithinCirclePredicate.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 | import utils.SpatialUtils.withinCircle
8 |
9 | case class WithinCirclePredicate(override val arguments: List[String]) extends Predicate(arguments) {
10 | private val centerLon: Double = arguments(0).toDouble
11 | private val centerLat: Double = arguments(1).toDouble
12 | private val radius: Double = arguments(2).toDouble
13 |
14 | override def evaluate(
15 | event: GenericEvent,
16 | valuation: Valuation
17 | ): Boolean = {
18 | val lon = event.getValueOf("lon").toString.toDouble
19 | val lat = event.getValueOf("lat").toString.toDouble
20 | withinCircle(lon, lat, centerLon, centerLat, radius)
21 | }
22 |
23 | override def toString: String = "WithinCirclePredicate(" + list2Str(arguments, ",") + ")"
24 | }
25 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASourceFromSRE.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | import fsm.CountPolicy.CountPolicy
4 | import ui.ConfigUtils
5 |
6 | object SDFASourceFromSRE {
7 | def apply(
8 | sreFile: String,
9 | policy: CountPolicy,
10 | declarations: String,
11 | minTermMethod: String
12 | ): SDFASourceFromSRE = new SDFASourceFromSRE(sreFile, policy, declarations, minTermMethod)
13 |
14 | def apply(
15 | sreFile: String,
16 | policy: CountPolicy,
17 | declarations: String
18 | ): SDFASourceFromSRE = new SDFASourceFromSRE(sreFile, policy, declarations, ConfigUtils.defaultMinTermMethod)
19 | }
20 |
21 | class SDFASourceFromSRE(
22 | val sreFile: String,
23 | val policy: CountPolicy,
24 | val declarations: String,
25 | val minTermMethod: String
26 | ) extends SDFASource {
27 |
28 | }
29 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spstm/SPSTmSourceFromSREM.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spstm
2 |
3 | import stream.source.StreamSource
4 |
5 | object SPSTmSourceFromSREM {
6 | def apply(
7 | patternFile: String,
8 | declarationsFile: String,
9 | streamSource: StreamSource,
10 | pMin: Double,
11 | alpha: Double,
12 | gammaMin: Double,
13 | r: Double
14 | ): SPSTmSourceFromSREM = new SPSTmSourceFromSREM(patternFile, declarationsFile, streamSource, pMin, alpha, gammaMin, r)
15 | }
16 |
17 | class SPSTmSourceFromSREM(
18 | val patternFile: String,
19 | val declarationsFile: String,
20 | val streamSource: StreamSource,
21 | val pMin: Double,
22 | val alpha: Double,
23 | val gammaMin: Double,
24 | val r: Double
25 | ) extends SPSTmSource {
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/data/demo/data.csv:
--------------------------------------------------------------------------------
1 | A,1
2 | A,2
3 | B,3
4 | A,4
5 | A,5
6 | A,6
7 | C,7
8 | C,8
9 | C,9
10 | B,10
11 | B,11
12 | C,12
13 | C,13
14 | C,14
15 | C,15
16 | A,16
17 | A,17
18 | A,18
19 | A,19
20 | C,20
21 | A,21
22 | B,22
23 | C,23
24 | A,24
25 | A,25
26 | C,26
27 | A,27
28 | A,28
29 | B,29
30 | B,30
31 | C,31
32 | A,32
33 | B,33
34 | B,34
35 | C,35
36 | A,36
37 | C,37
38 | B,38
39 | B,39
40 | C,40
41 | C,41
42 | C,42
43 | A,43
44 | C,44
45 | B,45
46 | C,46
47 | A,47
48 | C,48
49 | C,49
50 | B,50
51 | A,51
52 | B,52
53 | C,53
54 | B,54
55 | B,55
56 | A,56
57 | A,57
58 | A,58
59 | C,59
60 | C,60
61 | A,61
62 | C,62
63 | C,63
64 | A,64
65 | A,65
66 | C,66
67 | C,67
68 | C,68
69 | A,69
70 | B,70
71 | A,71
72 | C,72
73 | B,73
74 | B,74
75 | B,75
76 | B,76
77 | B,77
78 | C,78
79 | B,79
80 | C,80
81 | C,81
82 | C,82
83 | B,83
84 | A,84
85 | B,85
86 | B,86
87 | B,87
88 | A,88
89 | A,89
90 | B,90
91 | C,91
92 | A,92
93 | A,93
94 | B,94
95 | C,95
96 | C,96
97 | A,97
98 | B,98
99 | B,99
100 | A,100
101 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/FATransition.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical
2 |
3 | object FATransition {
4 | /**
5 | * Constructs a transition from source to target with the given symbol.
6 | *
7 | * @param source The id of the source state.
8 | * @param target The id of the target state.
9 | * @param symbol The symbol.
10 | * @return A transition.
11 | */
12 | def apply(
13 | source: Int,
14 | target: Int,
15 | symbol: String
16 | ): FATransition = new FATransition(source, target, symbol)
17 | }
18 |
19 | /**
20 | * Class representing transitions for classical finite automata.
21 | *
22 | * @param source The id of the source state.
23 | * @param target The id of the target state.
24 | * @param symbol The transition symbol.
25 | */
26 | class FATransition(
27 | val source: Int,
28 | val target: Int,
29 | val symbol: String
30 | ) {
31 |
32 | override def toString: String = {
33 | source + "-->" + target + "\t(" + symbol + ")"
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/vmm/mapper/SymbolExtractorFromDSRA.scala:
--------------------------------------------------------------------------------
1 | package model.vmm.mapper
2 |
3 | import fsm.symbolic.sra.Configuration
4 | import fsm.symbolic.sra.dsra.DSRASymbolized
5 | import model.vmm.Symbol
6 | import stream.GenericEvent
7 |
8 |
9 | object SymbolExtractorFromDSRA {
10 | def apply(dsra: DSRASymbolized): SymbolExtractorFromDSRA = new SymbolExtractorFromDSRA(dsra)
11 | }
12 |
13 | /**
14 | * Symbol mapper for symbolic automata with registers.
15 | * A symbolized dSRA must be given as argument.
16 | * A symbol is mapped to the transition that is triggered by an event, taking into account the contents of the
17 | * registers.
18 | *
19 | * @param dsra The symbolized dSRA.
20 | */
21 | class SymbolExtractorFromDSRA(val dsra: DSRASymbolized) extends SymbolMapper with Serializable {
22 |
23 | private var conf = Configuration(dsra.start)
24 |
25 | override def evaluate(event: GenericEvent): Symbol = {
26 | conf = dsra.yieldsSuccessorConfig(conf, event).head
27 | conf.symbol
28 | }
29 |
30 | override def getSymbols: List[Symbol] = dsra.getSymbols
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/source/GenericCSVLineParser.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import stream.{GenericEvent, ResetEvent}
5 |
6 | object GenericCSVLineParser extends LineParser with LazyLogging {
7 |
8 | /**
9 | * First column is the event type. Second column the timestamp.
10 | *
11 | * @param line A line, as a sequence of strings.
12 | * @param id The new event's unique id.
13 | * @return The line converted to an event.
14 | */
15 | override def line2Event(
16 | line: Seq[String],
17 | id: Int
18 | ): GenericEvent = {
19 | try {
20 | val eventType = line.head
21 | val timestamp = line(1).toLong
22 | if (timestamp == -1) ResetEvent()
23 | else {
24 | val ge = GenericEvent(id, eventType, timestamp)
25 | ge
26 | }
27 | } catch {
28 | case _: Exception => {
29 | logger.warn("COULD NOT PARSE LINE " + line)
30 | throw new Error("Could not parse line: " + line)
31 | }
32 | }
33 | }
34 | }
35 |
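For example, the first line of /data/demo/data.csv parses as follows; a timestamp of -1 would instead yield a ResetEvent.

import stream.source.GenericCSVLineParser

val event = GenericCSVLineParser.line2Event(Seq("A", "1"), id = 1)
// event.eventType == "A", event.timestamp == 1L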
--------------------------------------------------------------------------------
/cef/src/main/scala/model/vmm/pst/psa/PSATransition.scala:
--------------------------------------------------------------------------------
1 | package model.vmm.pst.psa
2 |
3 | import model.vmm.Symbol
4 |
5 | object PSATransition {
6 | /**
7 | * Constructor for PSA transitions.
8 | *
9 | * @param target The target state.
10 | * @param symbol The transition's symbol.
11 | * @param prob The transition's probability.
12 | * @return A PSA transition.
13 | */
14 | def apply(
15 | target: PSAState,
16 | symbol: Symbol, prob: Double
17 | ): PSATransition = new PSATransition(target, symbol, prob)
18 | }
19 |
20 | /**
21 | * Outgoing transition of PSAs.
22 | *
23 | * @param target The target state.
24 | * @param symbol The transition's symbol.
25 | * @param prob The transition's probability.
26 | */
27 | class PSATransition(
28 | val target: PSAState,
29 | val symbol: Symbol,
30 | val prob: Double
31 | ) extends Serializable {
32 |
33 | override def toString: String = "Target: " + target.label + " with " + symbol + " and probability " + prob
34 | }
35 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/pst/PSTSourceLearnerFromSDFA.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.pst
2 |
3 | import stream.source.StreamSource
4 | import workflow.provider.SDFAProvider
5 |
6 | object PSTSourceLearnerFromSDFA {
7 | def apply(
8 | sdfap: SDFAProvider,
9 | trainStream: StreamSource,
10 | maxOrder: Int,
11 | pMin: Double,
12 | alpha: Double,
13 | gammaMin: Double,
14 | r: Double
15 | ): PSTSourceLearnerFromSDFA = new PSTSourceLearnerFromSDFA(sdfap, trainStream, maxOrder, pMin, alpha, gammaMin, r)
16 | }
17 |
18 | class PSTSourceLearnerFromSDFA(
19 | val sdfap: SDFAProvider,
20 | val trainStream: StreamSource,
21 | val maxOrder: Int,
22 | val pMin: Double,
23 | val alpha: Double,
24 | val gammaMin: Double,
25 | val r: Double
26 | ) extends PSTSource {
27 |
28 | }
29 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/array/archived/CSVStream.scala:
--------------------------------------------------------------------------------
1 | package stream.array.archived
2 |
3 | import java.io.File
4 |
5 | import com.github.tototoshi.csv.CSVReader
6 | import stream.GenericEvent
7 | import stream.array.{EventStream, EventStreamI}
8 |
9 | class CSVStream private[stream] (fn: String) extends EventStreamI {
10 |
11 | def generateStream(): EventStream = {
12 | val eventStream = new EventStream()
13 | var counter = 0
14 | var eventTypes = Set.empty[String]
15 |
16 | val reader = CSVReader.open(new File(fn))
17 | for (line <- reader) {
18 | //val eventType: Char = line(0)(0)
19 | counter += 1
20 | val ne = createEvent(counter, line)
21 |       eventStream.addEvent(ne)
22 | eventTypes += ne.eventType
23 | }
24 | reader.close()
25 | eventStream.setEventTypes(eventTypes)
26 | eventStream
27 | }
28 |
29 | def createEvent(id: Int, attributes: Seq[String]): GenericEvent = {
30 | val eventType: Char = attributes(0)(0)
31 | val timestamp: Int = attributes(1).toInt
32 | GenericEvent(id, eventType.toString, timestamp)
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/pst/PSTSourceLearnerFromDSRA.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.pst
2 |
3 | import stream.source.StreamSource
4 | import workflow.provider.DSRAProvider
5 |
6 | object PSTSourceLearnerFromDSRA {
7 | def apply(
8 | dsrap: DSRAProvider,
9 | trainStream: StreamSource,
10 | maxOrder: Int,
11 | pMin: Double,
12 | alpha: Double,
13 | gammaMin: Double,
14 | r: Double
15 | ): PSTSourceLearnerFromDSRA = new PSTSourceLearnerFromDSRA(dsrap, trainStream, maxOrder, pMin, alpha, gammaMin, r)
16 | }
17 |
18 | class PSTSourceLearnerFromDSRA(
19 | val dsrap: DSRAProvider,
20 | val trainStream: StreamSource,
21 | val maxOrder: Int,
22 | val pMin: Double,
23 | val alpha: Double,
24 | val gammaMin: Double,
25 | val r: Double
26 | ) extends PSTSource {
27 |
28 | }
29 |
30 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/BT.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 |
8 | /**
9 | * Checks if the value of a certain event attribute (must be Double) falls within a range [min,max).
10 | * @param arguments 0 is the attribute name to be checked
11 | * 1 is the min of the range
12 | * 2 the max
13 | */
14 | case class BT(override val arguments: List[String]) extends Predicate(arguments) {
15 |
16 | override def evaluate(
17 | event: GenericEvent,
18 | valuation: Valuation
19 | ): Boolean = {
20 | require(arguments.size == 3)
21 | val attr = arguments(0)
22 | val min = arguments(1).toDouble
23 | val max = arguments(2).toDouble
24 | val variableValue = event.getValueOf(attr).toString.toDouble
25 |     variableValue >= min && variableValue < max
26 | }
27 |
28 | override def toString: String = "BT(" + list2Str(arguments, ",") + ")"
29 |
30 | }
31 |
--------------------------------------------------------------------------------
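The crux of BT is the half-open range check: min is included, max is excluded. A standalone sketch of the same logic, independent of the Wayeb classes:

```scala
// Standalone sketch of BT's half-open range check [min, max).
def between(value: Double, min: Double, max: Double): Boolean =
  value >= min && value < max

assert(between(12.3, 5.0, 20.0))   // inside the range
assert(!between(20.0, 5.0, 20.0))  // max itself is excluded
```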
/cef/src/main/scala/workflow/provider/source/forecaster/ForecasterHMMSourceBuild.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.forecaster
2 |
3 | import model.waitingTime.ForecastMethod.ForecastMethod
4 | import workflow.provider.{FSMProvider, HMMProvider}
5 |
6 | object ForecasterHMMSourceBuild {
7 | def apply(
8 | fsmp: FSMProvider,
9 | hmmp: HMMProvider,
10 | horizon: Int,
11 | confidenceThreshold: Double,
12 | maxSpread: Int,
13 | method: ForecastMethod
14 | ): ForecasterHMMSourceBuild =
15 | new ForecasterHMMSourceBuild(fsmp, hmmp, horizon, confidenceThreshold, maxSpread, method)
16 | }
17 |
18 | class ForecasterHMMSourceBuild(
19 | val fsmp: FSMProvider,
20 | val hmmp: HMMProvider,
21 | val horizon: Int,
22 | val confidenceThreshold: Double,
23 | val maxSpread: Int,
24 | val method: ForecastMethod
25 | ) extends ForecasterSource {
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/predicates/EpsilonPredicate.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 |
7 | /**
8 | * Implementation of the epsilon predicate, a predicate for transitions that are always triggered, even without any
9 | * events. This is actually a pseudo-predicate. Real predicates are always evaluated against an event and must override
10 | * the evaluate function. An epsilon predicate does not really need to evaluate anything. This implementation is here
11 | * just for consistency purposes since every transition is required to have a guard with a sentence.
12 | * TODO: It would possibly make more sense to just create a subclass of fsm.symbolic.sfa.Transition for epsilon transitions that would not have a sentence.
13 | *
14 | * @param arguments does not matter
15 | */
16 | case class EpsilonPredicate(override val arguments: List[String]) extends Predicate(arguments) {
17 | override def evaluate(
18 | event: GenericEvent,
19 | valuation: Valuation
20 | ): Boolean = true
21 | }
22 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spst/SPSTSourceFromSRE.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spst
2 |
3 | import fsm.CountPolicy.CountPolicy
4 | import stream.source.StreamSource
5 |
6 | object SPSTSourceFromSRE {
7 | def apply(
8 | patternFile: String,
9 | declarationsFile: String,
10 | streamSource: StreamSource,
11 | policy: CountPolicy,
12 | pMin: Double,
13 | alpha: Double,
14 | gammaMin: Double,
15 | r: Double
16 | ): SPSTSourceFromSRE =
17 | new SPSTSourceFromSRE(patternFile, declarationsFile, streamSource, policy, pMin, alpha, gammaMin, r)
18 | }
19 |
20 | class SPSTSourceFromSRE(
21 | val patternFile: String,
22 | val declarationsFile: String,
23 | val streamSource: StreamSource,
24 | val policy: CountPolicy,
25 | val pMin: Double,
26 | val alpha: Double,
27 | val gammaMin: Double,
28 | val r: Double
29 | ) extends SPSTSource {
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/cef/src/main/scala/estimator/HMMEstimator/IsoHMM.scala:
--------------------------------------------------------------------------------
1 | package estimator.HMMEstimator
2 |
3 | import model.vmm.mapper.Isomorphism
4 | import smile.sequence.HMM
5 |
6 | /**
7 | *
8 | * A class for representing a HMM corresponding to a symbolic automaton.
9 | *
10 | * @param hmm The HMM.
11 | * @param iso Each label is again an Int, corresponding to a minterm. We get a mapping from minterms to ints through an
12 | * isomorphism.
13 | * @param stateEncoding Each observation is an Int corresponding to a FSM state. However, we do not use the state
14 | * number directly. We create a mapping from the states to a list of increasing ints, with
15 | * stateEncoding.
16 | */
17 | class IsoHMM private[HMMEstimator] (
18 | val hmm: HMM[Int],
19 | val iso: Isomorphism,
20 | val stateEncoding: Map[Int, Int]
21 | ) {
22 |
23 | override def toString: String = {
24 | "ISO: " + iso.toString + "\n" +
25 | "HMM: " + hmm.toString + "\n" +
26 | "State Encoding: " + stateEncoding.toString()
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/ResetEvent.scala:
--------------------------------------------------------------------------------
1 | package stream
2 |
3 | object ResetEvent {
4 | /**
5 | * Constructor for reset events.
6 | *
7 | * @param extraArgs The map of extra attributes, if any. Could be empty. The partition attribute may be here.
8 | * @return A reset event.
9 | */
10 | def apply(extraArgs: Map[String, Any]): ResetEvent = new ResetEvent(extraArgs)
11 |
12 | /**
13 | * Constructor for reset events.
14 | *
15 | * @return A reset event.
16 | */
17 | def apply(): ResetEvent = new ResetEvent(Map.empty)
18 | }
19 |
20 | /**
21 |  * RESET events are special events. They are not part of the stream itself. They are inserted whenever we want to
22 |  * create a stream from many different, smaller substreams. Reset events are used to stitch the substreams together
23 |  * and must separate them. A RESET event resets a run, i.e., discards partial matches and sends the run back to its
24 |  * start state.
24 | * start state.
25 | *
26 | * @param extraArgs The map of extra attributes, if any. Could be empty. The partition attribute may be here.
27 | */
28 | final class ResetEvent(extraArgs: Map[String, Any])
29 | extends GenericEvent(-1, "RESET", 0, extraArgs) {
30 |
31 | }
32 |
--------------------------------------------------------------------------------
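A sketch of the stitching described above, using the three-argument GenericEvent constructor seen elsewhere in the codebase (the events themselves are illustrative): two substreams are concatenated with a ResetEvent in between, so any run is sent back to its start state before the second substream begins.

```scala
import stream.{GenericEvent, ResetEvent}

// Stitch two substreams with a ResetEvent separating them.
val sub1 = List(GenericEvent(1, "a", 1), GenericEvent(2, "b", 2))
val sub2 = List(GenericEvent(3, "a", 3), GenericEvent(4, "b", 4))
val stitched: List[GenericEvent] = sub1 ++ (ResetEvent() :: sub2)
```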
/cef/src/main/scala/fsm/symbolic/logic/predicates/DistanceBetweenPredicate.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic.predicates
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.logic.Predicate
5 | import stream.GenericEvent
6 | import utils.StringUtils.list2Str
7 | import utils.SpatialUtils.distanceBetween
8 |
9 | case class DistanceBetweenPredicate(override val arguments: List[String]) extends Predicate(arguments) {
10 | private val centerLon = arguments(0).toDouble
11 | private val centerLat = arguments(1).toDouble
12 | private val innerRadius = arguments(2).toDouble
13 | private val outerRadius = arguments(3).toDouble
14 |
15 | override def evaluate(
16 | event: GenericEvent,
17 | valuation: Valuation
18 | ): Boolean = {
19 |     if (event.hasAttribute("lon") && event.hasAttribute("lat")) {
20 | val lon = event.getValueOf("lon").toString.toDouble
21 | val lat = event.getValueOf("lat").toString.toDouble
22 | distanceBetween(lon, lat, centerLon, centerLat, innerRadius, outerRadius)
23 | } else false // in case a RESET event appears
24 | }
25 |
26 | override def toString: String = "DistanceBetweenPredicate(" + list2Str(arguments, ",") + ")"
27 | }
28 |
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/vmm/CompleteProperSuffixSet.scala:
--------------------------------------------------------------------------------
1 | package Specs.vmm
2 |
3 | import breeze.stats.distributions.Uniform
4 | import com.typesafe.scalalogging.LazyLogging
5 | import org.junit.runner.RunWith
6 | import org.scalatest.FlatSpec
7 | import org.scalatestplus.junit.JUnitRunner
8 | import model.vmm.Symbol
9 | import model.vmm.pst.psa.PSAUtils
10 |
11 | @RunWith(classOf[JUnitRunner])
12 | class CompleteProperSuffixSet extends FlatSpec with LazyLogging {
13 | "A suffix set " should " be complete and proper " in {
14 | for (m <- 1 to 3) testOrder(m)
15 | }
16 |
17 | def testOrder(maxOrder: Int): Unit = {
18 | logger.debug("Testing creation of a complete and proper suffix set @ order " + maxOrder)
19 | val uniExpansion = new Uniform(0, 1)
20 | val expansionProb = uniExpansion.sample()
21 | val symbols = (1 to 5).toSet[Int].map(i => Symbol(i))
22 | logger.debug("Creating suffix sets with symbols/maxOrder/expansionProb" + symbols + "/" + maxOrder + "/" + expansionProb)
23 | val suffixes = PSAUtils.createCompleteProperFullSuffixSet(symbols, maxOrder, expansionProb)
24 | logger.debug("Created complete, proper, full set " + suffixes)
25 | assert(PSAUtils.isCompleteProperFull(suffixes, symbols, maxOrder))
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/cef/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
3 |         <encoder>
4 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
5 |         </encoder>
6 |     </appender>
7 |
8 |     <appender name="FILE" class="ch.qos.logback.core.FileAppender">
9 |         <file>wayeb.log</file>
10 |         <append>false</append>
11 |         <encoder>
12 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
13 |         </encoder>
14 |     </appender>
15 |
16 |     <!-- original lines 17-26 were not recoverable -->
27 |
28 |     <root level="INFO">
29 |         <appender-ref ref="STDOUT"/>
30 |         <appender-ref ref="FILE"/>
31 |     </root>
32 |
33 | </configuration>
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/pst/PSTSourceCST.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.pst
2 |
3 | import model.vmm.mapper.Isomorphism
4 | import model.vmm.pst.CounterSuffixTree
5 |
6 | object PSTSourceCST {
7 | def apply(
8 | cstIsosOrder: List[(CounterSuffixTree, Isomorphism, Int)],
9 | pMin: Double,
10 | alpha: Double,
11 | gammaMin: Double,
12 | r: Double
13 | ): PSTSourceCST = new PSTSourceCST(cstIsosOrder, pMin, alpha, gammaMin, r)
14 |
15 | def apply(
16 | cstIsos: List[(CounterSuffixTree, Isomorphism)],
17 | maxorder: Int,
18 | pMin: Double,
19 | alpha: Double,
20 | gammaMin: Double,
21 | r: Double
22 | ): PSTSourceCST = {
23 | val cstIsosOrder = cstIsos.map( ci => (ci._1, ci._2, maxorder))
24 | new PSTSourceCST(cstIsosOrder, pMin, alpha, gammaMin, r)
25 | }
26 | }
27 |
28 | class PSTSourceCST(
29 | val cstIsosOrder: List[(CounterSuffixTree, Isomorphism, Int)],
30 | val pMin: Double,
31 | val alpha: Double,
32 | val gammaMin: Double,
33 | val r: Double
34 | ) extends PSTSource {
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/dfa/DFASourceRegExp.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.dfa
2 |
3 | import fsm.CountPolicy.CountPolicy
4 | import fsm.classical.pattern.regexp.RegExpTree
5 | import ui.ConfigUtils
6 |
7 | object DFASourceRegExp {
8 | def apply(
9 | re: RegExpTree,
10 | policy: CountPolicy,
11 | order: Int,
12 | streamSymbols: Set[String],
13 | partitionAttribute: String
14 | ): DFASourceRegExp = new DFASourceRegExp(
15 | re,
16 | policy,
17 | order,
18 | streamSymbols,
19 | partitionAttribute
20 | )
21 |
22 | def apply(
23 | re: RegExpTree,
24 | policy: CountPolicy,
25 | order: Int,
26 | streamSymbols: Set[String]
27 | ): DFASourceRegExp = new DFASourceRegExp(
28 | re,
29 | policy,
30 | order,
31 | streamSymbols,
32 | ConfigUtils.singlePartitionVal
33 | )
34 | }
35 |
36 | class DFASourceRegExp(
37 | val re: RegExpTree,
38 | val policy: CountPolicy,
39 | val order: Int,
40 | val streamSymbols: Set[String],
41 | val partitionAttribute: String
42 | ) extends DFASource {
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sra/dsra/DSRA.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sra.dsra
2 |
3 | import fsm.symbolic.sra.{SRA, SRATransition}
4 |
5 | import java.io.{FileOutputStream, ObjectOutputStream}
6 |
7 | object DSRA {
8 | def apply(
9 | states: Map[Int, DSRAState],
10 | transitions: List[SRATransition],
11 | start: Int,
12 | finals: Set[Int]
13 | ): DSRA = new DSRA(states, transitions, start, finals)
14 | }
15 |
16 | /**
17 | * Class representing deterministic symbolic register automata.
18 | *
19 | * @param states The automaton states, as a map of state ids to states.
20 | * @param transitions The list of transitions.
21 | * @param start The id of the start state.
22 | * @param finals The set of ids of the final states.
23 | */
24 | case class DSRA private[dsra] (
25 | states: Map[Int, DSRAState],
26 | transitions: List[SRATransition],
27 | override val start: Int,
28 | override val finals: Set[Int]
29 | ) extends SRA(states, transitions, start, finals) {
30 |
31 | def write2File(fn: String): Unit = {
32 | val oos = new ObjectOutputStream(new FileOutputStream(fn))
33 | oos.writeObject(this)
34 | oos.close()
35 | }
36 |
37 | }
38 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/source/LineParser.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | import stream.GenericEvent
4 |
5 | /**
6 | * Since for different domains we might need to parse lines differently, for each domain (when not JSON) we need to
7 | * create a class inheriting from this class and implement its methods.
8 | */
9 | abstract class LineParser(delimiter: String = ",") {
10 |
11 | /**
12 | * Every concrete CSV stream domain must implement this in order to determine how each line is to be converted to an
13 | * event.
14 | *
15 | * @param line A line, as a sequence of strings.
16 | * @param id The new event's unique id.
17 | * @return The line converted to an event.
18 | */
19 | def line2Event(
20 | line: Seq[String],
21 | id: Int
22 | ): GenericEvent
23 |
24 | /**
25 |    * In case the input is just a single string, we first break it into a sequence of strings and then call
26 |    * line2Event(scala.collection.Seq, int) above.
27 | *
28 | * @param line A line, as a string.
29 | * @param id The new event's unique id.
30 | * @return The line converted to an event.
31 | */
32 | def line2Event(
33 | line: String,
34 | id: Int
35 | ): GenericEvent = {
36 | line2Event(line.split(delimiter), id)
37 | }
38 |
39 | }
40 |
--------------------------------------------------------------------------------
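A hedged sketch of a minimal concrete parser under the contract above (the two-column convention mirrors GenericCSVLineParser; the object name is illustrative). The string overload splits on the delimiter and delegates to this implementation.

```scala
import stream.GenericEvent
import stream.source.LineParser

// Hypothetical minimal domain parser: column 0 is the event type, column 1 the timestamp.
object TwoColumnLineParser extends LineParser {
  override def line2Event(line: Seq[String], id: Int): GenericEvent =
    GenericEvent(id, line.head, line(1).toLong)
}

// TwoColumnLineParser.line2Event("a,42", 1) splits the string and yields an "a" event at time 42.
```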
/docs/building.md:
--------------------------------------------------------------------------------
1 | # Building Wayeb
2 |
3 | ## Requirements
4 |
5 | To build Wayeb from source, you need Java SE version 8 or higher and
6 | [SBT](http://www.scala-sbt.org/) installed on your system.
7 | Java 8 is recommended.
8 |
9 | ## Building
10 |
11 | To build Wayeb, run the following command:
12 | ```
13 | $ sbt build
14 | ```
15 |
16 | This will compile Wayeb, run all unit tests and then create a fat jar.
17 | If you want to skip the unit tests, run the command:
18 | ```
19 | $ sbt assembly
20 | ```
21 |
22 | If $WAYEB_HOME is the root directory of Wayeb,
23 | then the fat jar will be located under $WAYEB_HOME/cef/target/scala-2.12,
24 | with the name wayeb-0.6.0-SNAPSHOT.jar.
25 | This is a self-contained jar containing everything you might need to run Wayeb.
26 | You can copy it wherever you want.
27 |
28 | If you want to run the experiments described in **DBLP:journals/vldbj/AlevizosAP20**
29 | (see [How to cite Wayeb](docs/references.md)),
30 | then you also need to set $WAYEB_HOME as an environment variable.
31 | For example,
32 | ```
33 | $ export WAYEB_HOME=/root/dir/to/Wayeb
34 | ```
35 | If you want to permanently set $WAYEB_HOME,
36 | you may want to add this line to your .profile or .bashrc files.
37 |
38 | To see the options available when running Wayeb,
39 | run the following command:
40 | ```
41 | $ java -jar wayeb-0.6.0-SNAPSHOT.jar --help | more
42 | ```
43 |
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/vmm/PSAGenerator.scala:
--------------------------------------------------------------------------------
1 | package Specs.vmm
2 |
3 | import breeze.stats.distributions.Uniform
4 | import com.typesafe.scalalogging.LazyLogging
5 | import org.junit.runner.RunWith
6 | import org.scalatest.FlatSpec
7 | import org.scalatestplus.junit.JUnitRunner
8 | import model.vmm.Symbol
9 | import model.vmm.pst.psa.PSAUtils
10 | import ui.ConfigUtils
11 |
12 | @RunWith(classOf[JUnitRunner])
13 | class PSAGenerator extends FlatSpec with LazyLogging {
14 | "A randomly generated PSA " should " be valid " in {
15 | for (m <- 1 to ConfigUtils.maxOrder) testOrder(m)
16 | }
17 |
18 | def testOrder(maxOrder: Int): Unit = {
19 | val numberOfPSAs = 10
20 | logger.debug("Testing generation of PSA @ order " + maxOrder)
21 | for (i <- 1 to numberOfPSAs) {
22 | val uniExpansion = new Uniform(0, 1)
23 | val expansionProb = uniExpansion.sample()
24 | val symbols = (1 to ConfigUtils.symbolsNo).toSet[Int].map(i => Symbol(i))
25 | logger.debug("Generating PSA with maxOrder/expansionProb/symbols\n" + maxOrder + "/" + expansionProb + "/" + symbols)
26 | val psa = PSAUtils.createPSA(symbols, maxOrder, expansionProb)
27 | logger.debug("PSA generated\n " + psa.toString)
28 | logger.debug("PSA size/maxOrder: " + psa.size + "/" + psa.maxOrder)
29 | assert(PSAUtils.isSuffixFree(psa))
30 | assert(PSAUtils.isSuffixFull(psa))
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/domain/maritime/MaritimeLineParser.scala:
--------------------------------------------------------------------------------
1 | package stream.domain.maritime
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import stream.source.LineParser
5 | import stream.{GenericEvent, ResetEvent}
6 |
7 | object MaritimeLineParser extends LineParser with LazyLogging {
8 |
9 | override def line2Event(
10 | line: Seq[String],
11 | id: Int
12 | ): GenericEvent = {
13 | try {
14 | val timestamp = line(0).toLong
15 | val mmsi = line(1).toString
16 | val lon = line(2).toDouble
17 | val lat = line(3).toDouble
18 | val speed = line(4).toDouble
19 | val heading = line(5).toDouble
20 | val cog = line(6).toDouble
21 | val annotation = line(7)
22 | val nextCETimestamp = if (line.size > 8) line(8).toLong else -1
23 | if (timestamp == -1) ResetEvent(Map("mmsi" -> mmsi))
24 | else {
25 | val ge = GenericEvent(id, "SampledCritical", timestamp,
26 | Map("mmsi" -> mmsi, "speed" -> speed, "lon" -> lon, "lat" -> lat, "heading" -> heading,
27 | "cog" -> cog, "annotation" -> annotation, "nextCETimestamp" -> nextCETimestamp))
28 | ge
29 | }
30 | } catch {
31 |       case _: Exception => {
32 | logger.warn("COULD NOT PARSE LINE " + line)
33 | throw new Error
34 | }
35 | }
36 | }
37 |
38 | }
39 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/forecaster/next/NextForecasterBuilder.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster.next
2 |
3 | import fsm.FSMInterface
4 | import model.markov.MarkovChain
5 | import model.forecaster.runtime.Forecast
6 |
7 | object NextForecasterBuilder {
8 |
9 | /**
10 | * Creates a constructor for a next forecaster for a given FSM.
11 | *
12 | * @param fsm The FSM.
13 | * @param mc The FSM's Markov chain.
14 | * @return The forecaster builder.
15 | */
16 | def apply(
17 | fsm: FSMInterface,
18 | mc: MarkovChain
19 | ): NextForecasterBuilder = {
20 | val finalStates = fsm.getFinals
21 | val allStates = fsm.getStates
22 | val probs = allStates.map(nfs => (nfs, mc.getTransProbToStates(nfs, finalStates))).toMap
23 | val predictionsTable = probs.mapValues(prob => Forecast(1, 1, 1, prob))
24 | new NextForecasterBuilder(predictionsTable)
25 | }
26 |
27 | }
28 |
29 | /**
30 | * Builder for next forecasters.
31 | *
32 | * @param forecastsTable The table of single-point forecasts for the next predictor to be built.
33 | */
34 | class NextForecasterBuilder private(forecastsTable: Map[Int, Forecast]) {
35 |
36 | /**
37 | * Actually creates the forecaster.
38 | *
39 | * @return The next forecaster.
40 | */
41 | def createForecaster(): NextForecaster = {
42 | val np = NextForecaster(forecastsTable)
43 | np
44 | }
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/markov/MarkovChainFactory.scala:
--------------------------------------------------------------------------------
1 | package model.markov
2 |
3 | import breeze.linalg.DenseMatrix
4 | import fsm.FSMInterface
5 |
6 | class MarkovChainFactory {
7 |
8 | }
9 |
10 | /**
11 | * Factory for constructing Markov chains.
12 | */
13 | object MarkovChainFactory {
14 |
15 | /**
16 | * Constructs a Markov chain from a FSM and a set of conditional probabilities.
17 | *
18 | * @param fsm The FSM whose structure will guide the construction of the Markov chain.
19 | * The states of the FSM become Markov chain states.
20 | * @param probs A set of conditional probabilities with which the transition matrix will be filled.
21 | * @return The Markov chain.
22 | */
23 | def buildMC(
24 | fsm: FSMInterface,
25 | probs: TransitionProbs
26 | ): MarkovChain = MarkovChain(fsm, probs)
27 |
28 | /**
29 | * Constructs a Markov chain from a matrix, a mapping of FSM to Markov states and the number of final states.
30 | *
31 | * @param matrix The transition matrix.
32 | * @param state2Row The mapping of FSM to Markov states.
33 | * @param absorbingNo The number of final states.
34 | * @return The Markov chain.
35 | */
36 | def buildMC(
37 | matrix: DenseMatrix[Double],
38 | state2Row: Map[Int, Int],
39 | absorbingNo: Int
40 | ): MarkovChain = {
41 | MarkovChain(matrix, state2Row, absorbingNo)
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
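A small sketch of the matrix-based constructor, with hypothetical values: state 1 is the single absorbing (final) state, and each row of the transition matrix must sum to 1.

```scala
import breeze.linalg.DenseMatrix
import model.markov.MarkovChainFactory

// 2-state chain: state 0 stays with prob 0.7, moves to absorbing state 1 with 0.3.
val matrix = DenseMatrix((0.7, 0.3), (0.0, 1.0))
val state2Row = Map(0 -> 0, 1 -> 1) // FSM state id -> matrix row
val mc = MarkovChainFactory.buildMC(matrix, state2Row, absorbingNo = 1)
```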
/cef/src/main/scala/stream/array/XMLParser.scala:
--------------------------------------------------------------------------------
1 | package stream.array
2 |
3 | import scala.collection.mutable
4 | import scala.collection.mutable.Map
5 | import scala.xml._
6 |
7 | /**
8 |  * Parser for XML files used to generate random streams.
9 |  * Example xml file:
10 |  * <stream>
11 |  *   <size>1000</size>
12 |  *   <event>
13 |  *     <type>a</type>
14 |  *     <probability>0.5</probability>
15 |  *   </event>
16 |  *   <event>
17 |  *     <type>b</type>
18 |  *     <probability>0.25</probability>
19 |  *   </event>
20 |  *   <event>
21 |  *     <type>c</type>
22 |  *     <probability>0.25</probability>
23 |  *   </event>
24 |  * </stream>
25 |  *
26 | * @param filename The path to the file.
27 | */
28 | class XMLParser(filename: String) {
29 | private val probs = mutable.Map.empty[String, Double]
30 | private val loadnode: Elem = XML.loadFile(filename)
31 | private val size: Int = (loadnode \\ "size").text.toInt
32 | private val events: NodeSeq = loadnode \\ "event"
33 | var etype = ""
34 | var prob = 0.0
35 | for (event <- events) {
36 | etype = (event \\ "type").text
37 | prob = (event \\ "probability").text.toDouble
38 | probs += (etype -> prob)
39 | }
40 | var totalProb = 0.0
41 | for ((k, v) <- probs) {
42 | totalProb += v
43 | }
44 | if (totalProb != 1.0) {
45 | throw new IllegalArgumentException
46 | }
47 |
48 | def getSize: Int = size
49 |
50 | def getProbs: mutable.Map[String, Double] = probs
51 |
52 | def getEventTypes: Set[String] = probs.keys.toSet
53 |
54 | }
55 |
--------------------------------------------------------------------------------
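A usage sketch, assuming a file in the documented format (the path is hypothetical); the parser throws IllegalArgumentException when the event probabilities do not sum to exactly 1.

```scala
import stream.array.XMLParser

val parser = new XMLParser("stream.xml") // hypothetical path
println(s"${parser.getSize} events over types ${parser.getEventTypes}")
```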
/cef/src/main/scala/stream/source/ArrayStreamSource.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | import stream.array.EventStream
4 | import stream.source.EmitMode.EmitMode
5 |
6 | object ArrayStreamSource {
7 | def apply(eventStream: EventStream): ArrayStreamSource = new ArrayStreamSource(eventStream)
8 | }
9 |
10 | /**
11 | * A stream source created from an already existing stream array.
12 | *
13 | * @param eventStream The stream array of events.
14 | */
15 | class ArrayStreamSource(eventStream: EventStream) extends StreamSource {
16 |
17 | /**
18 | * For BUFFER mode, simply return the array. For ONLINE, send all events to listeners.
19 | *
20 | * @param mode The mode, BUFFER or ONLINE.
21 | * @param timeout The time (in seconds) the source is allowed to run. After the timeout, the source should stop
22 | * emitting events. Irrelevant here.
23 | * @return The stream as an array of events.
24 | */
25 | override protected def emitEvents(
26 | mode: EmitMode,
27 | timeout: Long
28 | ): EventStream = {
29 | mode match {
30 | case EmitMode.BUFFER => eventStream
31 | case EmitMode.ONLINE => {
32 | val streamSize = eventStream.getSize
33 | for (i <- 0 until streamSize) {
34 | val event = eventStream.getEvent(i)
35 | send2Listeners(event)
36 | }
37 | eventStream
38 | }
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
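The two modes in one sketch, assuming an existing, already-populated EventStream `es`: BUFFER hands the array back unchanged, while ONLINE pushes every event to the registered listeners. emitEventsAndClose is the entry point used elsewhere in the codebase (e.g. in EngineSpec below).

```scala
import stream.source.{ArrayStreamSource, EmitMode}

// `es` is an assumed, already-populated EventStream.
val src = ArrayStreamSource(es)
val buffered = src.emitEventsAndClose(EmitMode.BUFFER) // returns es as-is
// With EmitMode.ONLINE the same call would instead push each event to listeners.
```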
/cef/src/main/scala/fsm/symbolic/sra/nsra/NSRA.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sra.nsra
2 |
3 | import fsm.symbolic.Valuation
4 | import fsm.symbolic.sra.{SRA, SRATransition}
5 | import stream.GenericEvent
6 |
7 | import java.io.{FileOutputStream, ObjectOutputStream}
8 |
9 | object NSRA {
10 | def apply(
11 | states: Map[Int, NSRAState],
12 | transitions: List[SRATransition],
13 | start: Int,
14 | finals: Set[Int]
15 | ): NSRA = new NSRA(states, transitions, start, finals)
16 | }
17 |
18 | /**
19 | * Class representing non-deterministic symbolic register automata.
20 | *
21 | * @param states The automaton states, as a map of state ids to states.
22 | * @param transitions The list of transitions.
23 | * @param start The id of the start state.
24 | * @param finals The set of ids of the final states.
25 | */
26 | case class NSRA private[nsra] (
27 | states: Map[Int, NSRAState],
28 | transitions: List[SRATransition],
29 | override val start: Int,
30 | override val finals: Set[Int]
31 | ) extends SRA(states, transitions, start, finals) {
32 |
33 | //override def getDeltaWithEpsilon(stateId: Int, event: GenericEvent): Set[Int] = Set.empty
34 |
35 | def write2File(fn: String): Unit = {
36 | val oos = new ObjectOutputStream(new FileOutputStream(fn))
37 | oos.writeObject(this)
38 | oos.close()
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASourceFormula.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | import fsm.symbolic.sre.SREFormula
4 | import fsm.CountPolicy.CountPolicy
5 | import fsm.symbolic.logic.{Predicate, Sentence}
6 | import ui.ConfigUtils
7 |
8 | object SDFASourceFormula {
9 | def apply(
10 | formulas: List[(SREFormula, Int, String, Int, String)],
11 | policy: CountPolicy,
12 | exclusives: Set[Set[Predicate]],
13 | extras: Set[Sentence],
14 | minTermMethod: String
15 | ): SDFASourceFormula = new SDFASourceFormula(
16 | formulas,
17 | policy,
18 | exclusives,
19 | extras,
20 | minTermMethod
21 | )
22 |
23 | def apply(
24 | formulas: List[(SREFormula, Int, String, Int, String)],
25 | policy: CountPolicy,
26 | exclusives: Set[Set[Predicate]],
27 | extras: Set[Sentence]
28 | ): SDFASourceFormula = new SDFASourceFormula(
29 | formulas,
30 | policy,
31 | exclusives,
32 | extras,
33 | ConfigUtils.defaultMinTermMethod
34 | )
35 | }
36 |
37 | class SDFASourceFormula(
38 | val formulas: List[(SREFormula, Int, String, Int, String)],
39 | val policy: CountPolicy,
40 | val exclusives: Set[Set[Predicate]],
41 | val extras: Set[Sentence],
42 | val minTermMethod: String
43 | ) extends SDFASource {
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### JetBrains template
3 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
4 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
5 |
6 | # User-specific stuff:
7 | .idea/workspace.xml
8 | .idea/tasks.xml
9 | .idea/dictionaries
10 | .idea/vcs.xml
11 | .idea/jsLibraryMappings.xml
12 |
13 | # Sensitive or high-churn files:
14 | .idea/dataSources.ids
15 | .idea/dataSources.xml
16 | .idea/dataSources.local.xml
17 | .idea/sqlDataSources.xml
18 | .idea/dynamic.xml
19 | .idea/uiDesigner.xml
20 |
21 | # Gradle:
22 | .idea/gradle.xml
23 | .idea/libraries
24 |
25 | # Mongo Explorer plugin:
26 | .idea/mongoSettings.xml
27 |
28 | ## File-based project format:
29 | *.iws
30 |
31 | ## Plugin-specific files:
32 |
33 | # IntelliJ
34 | /out/
35 |
36 | # mpeltonen/sbt-idea plugin
37 | .idea_modules/
38 |
39 | # JIRA plugin
40 | atlassian-ide-plugin.xml
41 |
42 | # Crashlytics plugin (for Android Studio and IntelliJ)
43 | com_crashlytics_export_strings.xml
44 | crashlytics.properties
45 | crashlytics-build.properties
46 | fabric.properties
47 | ### Scala template
48 | *.class
49 | *.log
50 |
51 | # sbt specific
52 | .cache
53 | .history
54 | .lib/
55 | dist/*
56 | target/
57 | lib_managed/
58 | src_managed/
59 | project/boot/
60 | project/plugins/project/
61 |
62 | # Scala-IDE specific
63 | .scala_dependencies
64 | .worksheet
65 |
66 | #Kafka
67 | #kafkaConfigs
68 |
69 | # mine
70 | .idea
71 | stats
72 |
73 | .DS_Store
74 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/source/JsonFileStreamSource.scala:
--------------------------------------------------------------------------------
1 | package stream.source
2 |
3 | import stream.array.EventStream
4 | import stream.source.EmitMode.EmitMode
5 |
6 | object JsonFileStreamSource {
7 | def apply(fn: String): JsonFileStreamSource = new JsonFileStreamSource(fn)
8 | }
9 |
10 | /**
11 | * Stream source for JSON files. Every event attribute in the JSON event is mapped to an attribute of the generic
12 | * event. In BUFFER mode, events are stored in an array of events. In ONLINE mode, events are sent to listeners.
13 | *
14 | * @param filename The path to the file.
15 | */
16 | class JsonFileStreamSource(filename: String) extends StreamSource {
17 |
18 | override protected def emitEvents(
19 | mode: EmitMode,
20 | timeout: Long
21 | ): EventStream = {
22 | val bufferedSource = io.Source.fromFile(filename)
23 | var totalCounter = 1
24 | val eventStream = new EventStream()
25 | var eventTypes = Set.empty[String]
26 | for (line <- bufferedSource.getLines) {
27 | val ge = JsonLineParser.line2Event(line, totalCounter)
28 | totalCounter += 1
29 | mode match {
30 | case EmitMode.BUFFER => {
31 | eventStream.addEvent(ge)
32 | eventTypes += ge.eventType
33 | }
34 | case EmitMode.ONLINE => send2Listeners(ge)
35 | }
36 | }
37 | bufferedSource.close()
38 | eventStream.setEventTypes(eventTypes)
39 | eventStream
40 | }
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/domain/homes/HomesLineParser.scala:
--------------------------------------------------------------------------------
1 | package stream.domain.homes
2 |
3 | import stream.GenericEvent
4 | import stream.source.LineParser
5 |
6 | import scala.util.matching.Regex
7 |
8 | object HomesLineParser extends LineParser {
9 | /**
10 | * Every concrete CSV stream domain must implement this in order to determine how each line is to be converted to an
11 | * event.
12 | *
13 | * @param line A line, as a sequence of strings.
14 | * @param id The new event's unique id.
15 | * @return The line converted to an event.
16 | */
17 | override def line2Event(
18 | line: Seq[String],
19 | id: Int
20 | ): GenericEvent = {
21 | val loadPattern = new Regex("LOAD\\(.+")
22 | val loadMatch = loadPattern.findFirstMatchIn(line.head)
23 | val ge: GenericEvent = loadMatch match {
24 | case Some(x) => {
25 | val eventType = "LOAD"
26 | val plug_timestamp = line(1).split("=")(1)
27 | val value = line(2).split("=")(1).toDouble
28 | val household_id = line(4).split("=")(1).dropRight(1).toLong
29 | val eventId = line.head.split("\\(")(1).split("=")(1).toInt
30 | val timestamp = plug_timestamp.toLong
31 | GenericEvent(eventId, eventType, timestamp, Map("plug_timestamp" -> plug_timestamp, "value" -> value, "householdId" -> household_id))
32 | }
33 | case _ => {
34 | throw new Exception("COULD NOT PARSE LINE")
35 | }
36 | }
37 | ge
38 |
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/runtime/RunRegistry.scala:
--------------------------------------------------------------------------------
1 | package fsm.runtime
2 |
3 | /**
4 | * The creation of FSM runs follows the Prototype creational design pattern.
5 | * See https://sourcemaking.com/design_patterns/prototype.
6 | * For each pattern/FSM, we maintain a prototype.
7 | * Whenever we need to create a new run, we clone the prototype.
8 | */
9 | class RunRegistry {
10 | // The prototypes. Each pattern/FSM has a unique id.
11 | private var prototypes = Map[Int, RunPrototype]()
12 |
13 | /**
14 | * Adds a new prototype.
15 | *
16 | * @param rp The new prototype.
17 | */
18 | def addPrototype(rp: RunPrototype): Unit = {
19 | val k = rp.getFsmId
20 | if (prototypes.contains(k))
21 | throw new IllegalArgumentException("There already exists a RunPrototype with id: " + k)
22 | prototypes += (k -> rp)
23 | }
24 |
25 | /**
26 | * Creates a run for a FSM by cloning the prototype.
27 | *
28 | * @param fsmId The id of the FSM.
29 | * @return The new run.
30 | */
31 | def findAndClone(
32 | fsmId: Int,
33 | runId: Int
34 | ): Run = {
35 | if (!prototypes.contains(fsmId))
36 | throw new IllegalArgumentException("RunRegistry has no prototypes for: " + fsmId)
37 | val rp = prototypes(fsmId)
38 | rp.cloneRun(runId)
39 | }
40 |
41 | /**
42 | * Clear all prototypes.
43 | */
44 | def clear(): Unit = prototypes = Map[Int, RunPrototype]()
45 |
46 | override def toString: String = prototypes.toString()
47 |
48 | }
49 |
--------------------------------------------------------------------------------
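A sketch of the prototype flow, with `rp` standing in for any RunPrototype implementation: register once per FSM, then clone per run.

```scala
import fsm.runtime.RunRegistry

// `rp` is an assumed RunPrototype; its getFsmId identifies the FSM it belongs to.
val registry = new RunRegistry
registry.addPrototype(rp)
val run = registry.findAndClone(rp.getFsmId, runId = 1) // clones the prototype
```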
/cef/src/main/scala/workflow/provider/source/forecaster/ForecasterSourceBuild.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.forecaster
2 |
3 | import model.waitingTime.ForecastMethod.ForecastMethod
4 | import ui.ConfigUtils
5 | import workflow.provider.{FSMProvider, WtProvider}
6 |
7 | object ForecasterSourceBuild {
8 |
9 | def apply(
10 | fsmp: FSMProvider,
11 | wtdp: WtProvider,
12 | horizon: Int,
13 | confidenceThreshold: Double,
14 | maxSpread: Int,
15 | method: ForecastMethod
16 | ): ForecasterSourceBuild = new ForecasterSourceBuild(
17 | fsmp,
18 | wtdp,
19 | horizon,
20 | confidenceThreshold,
21 | maxSpread,
22 | method
23 | )
24 |
25 | def apply(
26 | fsmp: FSMProvider,
27 | wtdp: WtProvider,
28 | horizon: Int,
29 | confidenceThreshold: Double,
30 | maxSpread: Int
31 | ): ForecasterSourceBuild = new ForecasterSourceBuild(
32 | fsmp,
33 | wtdp,
34 | horizon,
35 | confidenceThreshold,
36 | maxSpread,
37 | ConfigUtils.defaultForeMethod
38 | )
39 |
40 | }
41 |
42 | class ForecasterSourceBuild(
43 | val fsmp: FSMProvider,
44 | val wtdp: WtProvider,
45 | val horizon: Int,
46 | val confidenceThreshold: Double,
47 | val maxSpread: Int,
48 | val method: ForecastMethod
49 | ) extends ForecasterSource {
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/classical/pattern/regexp/archived/Node.scala:
--------------------------------------------------------------------------------
1 | package fsm.classical.pattern.regexp.archived
2 |
3 | import fsm.classical.pattern.regexp.NodeType._
4 | import fsm.classical.pattern.regexp.OperatorType._
5 |
6 | class Node(
7 | nodeType: NodeType,
8 | operator: OperatorType,
9 | symbol: String
10 | ) {
11 | require((nodeType == OPERATOR & symbol == "") | (nodeType == SYMBOL & operator == NONE))
12 |
13 | private var children = List[Node]()
14 |
15 | def addChildren(ch: List[Node]): Unit = {
16 | require(nodeType == OPERATOR)
17 | require((operator == ITER & ch.size == 1) | ((operator == CONCAT | operator == UNION) & ch.size == 2))
18 | children = ch
19 | }
20 |
21 | def getLeftChild: Node = {
22 | require(nodeType == OPERATOR)
23 | children.head
24 | }
25 |
26 | def getRightChild: Node = {
27 | require(nodeType == OPERATOR)
28 | require(operator == CONCAT | operator == UNION)
29 | children(1)
30 | }
31 |
32 | def isLeaf: Boolean = children.isEmpty
33 |
34 | override def toString: String = {
35 | if (nodeType == OPERATOR) operator.toString
36 | else symbol
37 | }
38 |
39 | def toStringRecursively: String = {
40 | var s = ""
41 | s += toString
42 | if (nodeType == OPERATOR) {
43 | s += "("
44 | if (operator == ITER) s += getLeftChild.toStringRecursively
45 | else {
46 | s += getLeftChild.toStringRecursively
47 | s += getRightChild.toStringRecursively
48 | }
49 | s += ")"
50 | } else {
51 | s += ","
52 | }
53 | //if (nodeType==OPERATOR) s += ")"
54 | s
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/SFATransition.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa
2 |
3 | import fsm.symbolic.Transition
4 | import fsm.symbolic.TransitionOutput.{IGNORE, TAKE, TransitionOutput}
5 |
6 | object SFATransition {
7 | def apply(
8 | source: Int,
9 | target: Int,
10 | guard: SFAGuard,
11 | output: TransitionOutput
12 | ): SFATransition = new SFATransition(source, target, guard, output)
13 |
14 | /**
15 | * Constructor for transition.
16 | *
17 | * @param source The id of the source state.
18 | * @param target The id of the target state.
19 | * @param guard The transition guard, holding the transition sentence.
20 | * @return The transition.
21 | */
22 | def apply(
23 | source: Int,
24 | target: Int,
25 | guard: SFAGuard
26 | ): SFATransition = {
27 | if (guard.isEpsilon) new SFATransition(source, target, guard, IGNORE)
28 | else new SFATransition(source, target, guard, TAKE)
29 | }
30 | }
31 |
32 | /**
33 | * Class representing transitions of symbolic automata.
34 | *
35 | * @param source The id of the source state.
36 | * @param target The id of the target state.
37 | * @param guard The transition guard, holding the transition sentence.
38 | * @param output The output emitted by the transition.
39 | */
40 | class SFATransition(
41 | source: Int,
42 | target: Int,
43 | guard: SFAGuard,
44 | output: TransitionOutput
45 | ) extends Transition(source, target, guard, output) with Serializable {
46 | }
47 |
--------------------------------------------------------------------------------
/cef/src/main/scala/estimator/RemainingTimeEstimator/MeanRun.scala:
--------------------------------------------------------------------------------
1 | package estimator.RemainingTimeEstimator
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import estimator.RunEstimatorEngine
5 | import fsm.FSMInterface
6 | import stream.source.StreamSource
7 |
8 | object MeanRun {
9 | /**
10 | * Constructor for a Mean run.
11 | *
12 | * @param fsm The FSM for which we want to build a model.
13 | * @return The Mean run.
14 | */
15 | def apply(fsm: FSMInterface): MeanRun = new MeanRun(fsm)
16 | }
17 |
18 | /**
19 | * A MeanRun is responsible for running estimation of a remaining time model for a FSM.
20 | *
21 | * @param fsm The FSM for which we want to build a model.
22 | */
23 | class MeanRun private (fsm: FSMInterface) extends LazyLogging {
24 |
25 | /**
26 | * Method called to actually run estimation. For the same FSM, it can be called multiple times for different
27 | * training streams, if required.
28 | *
29 | * @param streamSource Source for the training stream.
30 | * @return The estimator holding the remaining times.
31 | */
32 | def estimateMeans(streamSource: StreamSource): MeanEstimator = {
33 | val learner = MeanEstimator(fsm)
34 | run(learner, streamSource)
35 | learner.estimate()
36 | learner
37 | }
38 |
39 | private def run(
40 | learner: MeanEstimator,
41 | streamSource: StreamSource
42 | ): MeanEstimator = {
43 | logger.info("Running Mean estimation...")
44 | val meane = RunEstimatorEngine(fsm, learner)
45 | streamSource.emitEventsToListener(meane)
46 | learner
47 | }
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sra/nsra/Tracker.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sra.nsra
2 |
3 | import scala.collection.immutable.{Map, Set}
4 |
5 | /**
6 | * Class used to track which states we have checked during elimination.
7 | */
8 | class Tracker {
9 |
10 | private var statesSeen: Set[Int] = Set[Int]()
11 | private var statesToSee: Set[Int] = Set[Int]()
12 |
13 | private var id2Set: Map[Int, Set[Int]] = Map[Int, Set[Int]]()
14 | private var set2Id: Map[Set[Int], Int] = Map[Set[Int], Int]()
15 |
16 | def addSeenState(sd: Int): Unit = {
17 | require(statesToSee.contains(sd))
18 | statesSeen = statesSeen + sd
19 | statesToSee = statesToSee - sd
20 | }
21 |
22 | def addStateToSee(s: Set[Int]): Int = {
23 | if (isStateSeen(s)) set2Id(s)
24 | else if (isStateToSee(s)) set2Id(s)
25 | else {
26 | val newId = getNewStateId
27 | statesToSee = statesToSee + newId
28 | id2Set += (newId -> s)
29 | set2Id += (s -> newId)
30 | newId
31 | }
32 | }
33 |
34 | def isStateSeen(s: Set[Int]): Boolean = {
35 | if (set2Id.contains(s)) statesSeen(set2Id(s))
36 | else false
37 | }
38 |
39 | def isStateToSee(s: Set[Int]): Boolean = {
40 | if (set2Id.contains(s)) statesToSee(set2Id(s))
41 | else false
42 | }
43 |
44 | def getNewStateId: Int = statesSeen.size + statesToSee.size
45 |
46 | def hasStatesToSee: Boolean = statesToSee.nonEmpty
47 |
48 | def getStatesToSee: Map[Int, Set[Int]] = {
49 | var statesToSeeMap = Map[Int, Set[Int]]()
50 | for (s <- statesToSee) {
51 | statesToSeeMap += (s -> id2Set(s))
52 | }
53 | statesToSeeMap
54 | }
55 |
56 | }
57 |
--------------------------------------------------------------------------------
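A sketch of the bookkeeping loop during elimination: each subset of NSRA states gets a fresh id, is processed once, and is then moved from the to-see set to the seen set.

```scala
import fsm.symbolic.sra.nsra.Tracker

val tracker = new Tracker
tracker.addStateToSee(Set(0, 1)) // fresh subset-state, gets id 0
while (tracker.hasStatesToSee) {
  val pending = tracker.getStatesToSee // id -> subset of NSRA states
  // ... build outgoing transitions for each pending subset here ...
  pending.keys.foreach(tracker.addSeenState)
}
```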
/cef/src/main/scala/stream/array/ListStream.scala:
--------------------------------------------------------------------------------
1 | package stream.array
2 |
3 | import stream.GenericEvent
4 | import scala.collection.mutable
5 |
6 |
7 | /**
8 | * Converts a list of strings to an event stream. The list is assumed to contain the event types.
9 | *
10 | * @param l The list of strings.
11 | */
12 | class ListStream private[stream] (l: List[String]) extends EventStreamI {
13 | private val inputList = l
14 | private var probs = mutable.Map.empty[String, Double]
15 | private var cumulativeProbs = mutable.Map.empty[String, Double]
16 |
17 | /**
18 | * Actually generates the stream.
19 | *
20 | * @return The event stream.
21 | */
22 | def generateStream(): EventStream = {
23 | val counter = mutable.Map.empty[String, Int]
24 | val eventStream = new EventStream()
25 | var i = 0
26 | for (e <- inputList) {
27 | i += 1
28 | eventStream.addEvent(GenericEvent(e, i))
29 | if (counter.contains(e)) {
30 | counter(e) += 1
31 | } else {
32 | counter += (e -> 1)
33 | }
34 | }
35 | calculateProbs(counter)
36 | eventStream.setEventTypes(probs.keys.toSet)
37 | eventStream.setProbs(probs)
38 | eventStream.setCumulativeProbs(cumulativeProbs)
39 | eventStream
40 | }
41 |
42 | private def calculateProbs(counter: mutable.Map[String, Int]): Unit = {
43 | val size = inputList.size.toDouble
44 | var cumProb = 0.0
45 | for ((k, v) <- counter) {
46 | val p = v.toDouble / size
47 | probs += (k -> p)
48 | }
49 | for ((k, v) <- probs) {
50 | cumProb += v
51 | cumulativeProbs += (k -> cumProb)
52 | }
53 | }
54 |
55 | }
56 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/forecaster/random/RandomForecaster.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster.random
2 |
3 | import model.forecaster.ForecasterType
4 | import model.forecaster.ForecasterType.ForecasterType
5 | import model.forecaster.runtime.Forecast
6 |
7 | object RandomForecaster {
8 | /**
9 | * Constructor for random predictor.
10 | *
11 | * @param seed The seed for the random number generators.
12 | * @param horizon The horizon.
13 | * @return The random predictor.
14 | */
15 | def apply(
16 | seed: Int,
17 | horizon: Int
18 | ): RandomForecaster = new RandomForecaster(seed, horizon, ForecasterType.REGRESSION)
19 | }
20 |
21 | /**
22 | * This is a predictor that produces random forecast intervals within the given horizon.
23 | *
24 | * @param seed The seed for the random number generators.
25 | * @param horizon The horizon.
26 | * @param forecasterType Should always be REGRESSION.
27 | */
28 | class RandomForecaster private(
29 | val seed: Int,
30 | val horizon: Int,
31 | val forecasterType: ForecasterType
32 | ) {
33 |
34 | private val rs = new scala.util.Random(seed)
35 | private val re = new scala.util.Random(seed)
36 |
37 | /**
38 | * @return A random forecast interval within the horizon.
39 | */
40 | def getRandForecast: Forecast = {
41 | val start = rs.nextInt(horizon) + 1
42 |     var end = start
43 |     // Extend the interval only when start < horizon; otherwise end would exceed the horizon.
44 |     if (start != horizon) end = start + re.nextInt(horizon - start) + 1
45 | Forecast(start, end, (start + end) / 2, 0.0)
46 | }
47 |
48 | }
49 |
--------------------------------------------------------------------------------
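A usage sketch: with the guard above, every interval drawn below satisfies 1 <= start <= end <= horizon.

```scala
import model.forecaster.random.RandomForecaster

// Draw a few random forecast intervals within a horizon of 10.
val rf = RandomForecaster(seed = 42, horizon = 10)
val intervals = (1 to 3).map(_ => rf.getRandForecast)
intervals.foreach(println)
```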
/cef/src/test/scala/Specs/engine/EngineSpec.scala:
--------------------------------------------------------------------------------
1 | package Specs.engine
2 |
3 | import fsm.runtime.{Match, MatchDump}
4 | import org.junit.runner.RunWith
5 | import org.scalatest.FlatSpec
6 | import org.scalatestplus.junit.JUnitRunner
7 | import fsm.classical.pattern.regexp.RegExpUtils
8 | import stream.StreamFactory
9 | import stream.source.EmitMode
10 | import ui.ConfigUtils
11 | import workflow.provider.source.dfa.DFASourceRegExp
12 | import workflow.provider.{DFAProvider, FSMProvider}
13 | import workflow.task.engineTask.ERFTask
14 | import scala.collection.mutable.ListBuffer
15 |
16 | @RunWith(classOf[JUnitRunner])
17 | class EngineSpec extends FlatSpec {
18 | "The pattern a;b in sequence a,b,a,b " should " produce two matches {1,2} and {3,4} " in {
19 | val mypattern = RegExpUtils.getConcatStr(List("a", "b"))
20 | val policy = ConfigUtils.defaultPolicy
21 | val ss = StreamFactory.getStreamSource(List("a", "b", "a", "b"))
22 | val es = ss.emitEventsAndClose(EmitMode.BUFFER)
23 |
24 | val md = new MatchDump()
25 | val expectedMatch1 = Match()
26 | expectedMatch1.setEvents(ListBuffer(1, 2))
27 | expectedMatch1.setFull(true)
28 | val expectedMatch2 = Match()
29 | expectedMatch2.setEvents(ListBuffer(3, 4))
30 | expectedMatch2.setFull(true)
31 | md.addMatch(expectedMatch1)
32 | md.addMatch(expectedMatch2)
33 |
34 | val dfaProvider = DFAProvider(DFASourceRegExp(mypattern, policy, 0, es.getEventTypes))
35 | val fsmp = FSMProvider(dfaProvider)
36 | val erf = ERFTask(fsmp, ss, show = false, reset = false)
37 | val prof = erf.execute()
38 | prof.printMatches()
39 |
40 | assert(prof.getMatchDump.checkAgainst(md))
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/sdfa/SDFASourceRegExp.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.sdfa
2 |
3 | import fsm.CountPolicy.CountPolicy
4 | import fsm.classical.pattern.regexp.RegExpTree
5 | import fsm.symbolic.logic.{Predicate, Sentence}
6 | import ui.ConfigUtils
7 |
8 | object SDFASourceRegExp {
9 | def apply(
10 | re: RegExpTree,
11 | order: Int,
12 | partitionAttribute: String,
13 | window: Int,
14 | exclusives: Set[Set[Predicate]],
15 | extras: Set[Sentence],
16 | policy: CountPolicy
17 | ): SDFASourceRegExp = new SDFASourceRegExp(re, order, partitionAttribute, window, exclusives, extras, policy, ConfigUtils.defaultMinTermMethod)
18 |
19 | def apply(
20 | re: RegExpTree,
21 | order: Int,
22 | partitionAttribute: String,
23 | window: Int,
24 | exclusives: Set[Set[Predicate]],
25 | extras: Set[Sentence]
26 | ): SDFASourceRegExp = new SDFASourceRegExp(re, order, partitionAttribute, window, exclusives, extras, ConfigUtils.defaultPolicy, ConfigUtils.defaultMinTermMethod)
27 | }
28 |
29 | class SDFASourceRegExp(
30 | val re: RegExpTree,
31 | val order: Int,
32 | val partitionAttribute: String,
33 | val window: Int,
34 | val exclusives: Set[Set[Predicate]],
35 | val extras: Set[Sentence],
36 | val policy: CountPolicy,
37 | val minTermMethod: String
38 | ) extends SDFASource {
39 |
40 | }
41 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/TruthTable.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic
2 |
3 | /**
4 | * For each assignment of truth values to predicates, a truth table contains a single truth value.
5 | * Useful when, for a sentence, we need to know when it evaluates to true, i.e., for what assignments of values to its
6 | * predicates the sentence evaluates to true.
7 | *
8 | * @param rows A map of assignments with their truth value.
9 | */
10 | case class TruthTable(rows: Map[Assignment, Boolean]) {
11 |
12 | /**
13 | * Checks whether this table "entails" another table, i.e., for every true row of this table, the row of the other
14 | * table must also be true.
15 | * Both tables must be the "same", i.e., they must have the same assignments.
16 | *
17 | * @param otherTable the other table to check.
18 | * @return True if this table entails the other table.
19 | */
20 | def entails(otherTable: TruthTable): Boolean = {
21 | val otherRows = otherTable.rows
22 | require(rows.keySet == otherRows.keySet)
23 | rows.forall(row => rowEntails(row._2, otherRows(row._1)))
24 | }
25 |
26 | /**
27 | * Checks if one row entails another, i.e., if row1 is true, so must be row2.
28 | *
29 | * @param row1 The truth value of the first row.
30 | * @param row2 The truth value of the second row.
31 | * @return True if row1 entails row2.
32 | */
33 | def rowEntails(
34 | row1: Boolean,
35 | row2: Boolean
36 | ): Boolean = if (!row1) true else row2
37 |
38 | override def toString: String = {
39 | rows.map(row => row._1.toString + "\t\t\t\t\t\t" + row._2).foldLeft("") { (acc, x) => acc + "\n" + x }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
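The entailment check reduces to material implication per row (!row1 || row2). A standalone sketch over plain maps, with string keys standing in for assignments:

```scala
// t1 entails t2 iff every true row of t1 is also true in t2.
def rowEntails(r1: Boolean, r2: Boolean): Boolean = !r1 || r2

val t1 = Map("p=T,q=T" -> true, "p=T,q=F" -> false)
val t2 = Map("p=T,q=T" -> true, "p=T,q=F" -> true)
assert(t1.forall { case (a, v) => rowEntails(v, t2(a)) }) // t1 entails t2
```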
/cef/src/main/scala/workflow/provider/source/spst/SPSTSourceFromSDFA.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spst
2 |
3 | import stream.source.StreamSource
4 | import workflow.provider.SDFAProvider
5 | import workflow.provider.source.pst.{PSTSource, PSTSourceLearnerFromSDFA}
6 | import workflow.provider.source.sdfa.SDFASourceDirectI
7 |
8 | object SPSTSourceFromSDFA {
9 | def apply(
10 | sdfap: SDFAProvider,
11 | order: Int,
12 | trainStreamSource: StreamSource,
13 | pMin: Double,
14 | alpha: Double,
15 | gamma: Double,
16 | r: Double
17 | ): SPSTSourceFromSDFA = {
18 | val pstSource = PSTSourceLearnerFromSDFA(sdfap, trainStreamSource, order, pMin, alpha, gamma, r)
19 | SPSTSourceFromSDFA(sdfap, order, pstSource)
20 | }
21 |
22 | def apply(
23 | sdfap: SDFAProvider,
24 | order: Int,
25 | pstSource: PSTSource
26 | ): SPSTSourceFromSDFA = {
27 | val sdfais = sdfap.provide()
28 | val sdfapDirectI = SDFAProvider(SDFASourceDirectI(sdfais))
29 | val size = sdfais.size
30 | val orders = List.fill(size){order}
31 | new SPSTSourceFromSDFA(sdfapDirectI, orders, pstSource)
32 | }
33 |
34 | def apply(
35 | sdfap: SDFAProvider,
36 | orders: List[Int],
37 | pstSource: PSTSource
38 | ): SPSTSourceFromSDFA = new SPSTSourceFromSDFA(sdfap, orders, pstSource)
39 | }
40 |
41 | class SPSTSourceFromSDFA(
42 | val sdfap: SDFAProvider,
43 | val orders: List[Int],
44 |   val pstSource: PSTSource
45 | ) extends SPSTSource {
46 | }
47 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sre/Declaration.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sre
2 |
3 | import fsm.symbolic.logic.{LogicUtils, Predicate, Sentence}
4 | import utils.StringUtils.list2Str
5 |
6 | abstract class Declaration
7 |
8 | /**
9 | * A holder for the list of exclusives. Each fsm.symbolic.sre.LogicAtomicSentence is converted to a
10 | * fsm.symbolic.sfa.logic.Sentence.
11 | *
12 | * @param sentencesSRE The parsed pattern predicates to be converted to actual sentences.
13 | */
14 | case class Exclusive(sentencesSRE: List[LogicAtomicSentence]) extends Declaration {
15 | val sentences: List[Sentence] = sentencesSRE.map(s => LogicUtils.parsed2ActualSentence(s))
16 | val predicates: Set[Predicate] = sentences.foldLeft(Set.empty[Predicate]) { (acc, x) => acc ++ x.extractPredicates }
17 |
18 | val getPredicates: Set[Predicate] = predicates
19 |
20 | override def toString: String = "Exclusive(" + list2Str(predicates.toList, ",") + ")"
21 | }
22 |
23 | /**
24 |  * A holder for the list of extras. Each fsm.symbolic.sre.LogicAtomicSentence is converted to a
25 |  * fsm.symbolic.logic.Sentence.
26 | *
27 | * @param sentencesSRE The parsed pattern predicates to be converted to actual sentences.
28 | */
29 | case class Extras(sentencesSRE: List[LogicAtomicSentence]) extends Declaration {
30 | val sentences: List[Sentence] = sentencesSRE.map(s => LogicUtils.parsed2ActualSentence(s))
31 | val predicates: Set[Predicate] = sentences.foldLeft(Set.empty[Predicate]) { (acc, x) => acc ++ x.extractPredicates }
32 |
33 | val getPredicates: Set[Predicate] = predicates
34 |
35 | val getSentences: List[Sentence] = sentences
36 |
37 | override def toString: String = "Extras(" + list2Str(predicates.toList, ",") + ")"
38 | }
39 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/forecaster/next/NextForecaster.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster.next
2 |
3 | import model.forecaster.ForecasterType
4 | import model.forecaster.ForecasterType.ForecasterType
5 | import model.forecaster.runtime.Forecast
6 |
7 | object NextForecaster {
8 | /**
9 | * Constructs a next forecaster.
10 | *
11 | * @param predictionsTable The table of single-point predictions.
12 | * @return The next forecaster.
13 | */
14 | private[next] def apply(predictionsTable: Map[Int, Forecast]): NextForecaster =
15 | new NextForecaster(predictionsTable, ForecasterType.REGRESSION)
16 | }
17 |
18 | /**
19 | * This is a forecaster that predicts how probable it is to reach a final state at the next point.
20 | * Its forecasts are always single point, referring to the next point in the stream.
21 | *
22 | * @param forecastsTable The table of single-point forecasts.
23 | * @param forecasterType Should always be REGRESSION.
24 | */
25 | class NextForecaster private(
26 | val forecastsTable: Map[Int, Forecast],
27 | val forecasterType: ForecasterType
28 | ) {
29 |
30 | /**
31 |    * Retrieves the forecast interval (actually a single point) for a given state from the table of predictions.
32 | *
33 | * @param state The id of the given state.
34 | * @return The forecast interval.
35 | */
36 | def getForecastFromTable(state: Int): Forecast = {
37 | val tablePred = forecastsTable(state)
38 | tablePred
39 | }
40 |
41 | /**
42 | * @return The ids of all the states for which we have entries in the predictions table.
43 | */
44 | def getStates: Set[Int] = forecastsTable.keySet
45 | }
46 |
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/task/predictorTask/PredictorRandomTask.scala:
--------------------------------------------------------------------------------
1 | package workflow.task.predictorTask
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import model.forecaster.RandomInterface
5 | import model.forecaster.random.RandomForecaster
6 | import ui.ConfigUtils
7 | import workflow.provider.FSMProvider
8 | import workflow.task.Task
9 |
10 | object PredictorRandomTask {
11 |
12 | /**
13 | * Constructor for random predictor task.
14 | *
15 | * @param fsmp The provider for the FSMs.
16 | * @param horizon The horizon.
17 | * @return A random predictor task.
18 | */
19 | def apply(
20 | fsmp: FSMProvider,
21 | horizon: Int
22 | ): PredictorRandomTask = new PredictorRandomTask(fsmp, horizon)
23 |
24 | }
25 |
26 | /**
27 | * Creates random predictors.
28 | *
29 | * @param fsmp The provider for the FSMs.
30 | * @param horizon The horizon.
31 | */
32 | class PredictorRandomTask private (
33 | fsmp: FSMProvider,
34 | horizon: Int
35 | ) extends Task with LazyLogging {
36 |
37 | private val randPredSeed = ConfigUtils.randPredSeed
38 |
39 | /**
40 | * Executes the task.
41 | *
42 | * @return A list of random predictor interfaces, one for each FSM.
43 | */
44 | override def execute(): List[RandomInterface] = {
45 | logger.info("Executing random predictor task...")
46 | val fsmList = fsmp.provide()
47 | val randpp = fsmList.map(fsm => (fsm.getId, fsm.getNonFinals, RandomForecaster(randPredSeed, horizon)))
48 | val ris = randpp.map(r => RandomInterface(r._1, r._3, r._2))
49 | logger.info("done.")
50 | ris
51 | }
52 |
53 | }
54 |
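55 | // Illustrative sketch (not part of the original file): running the task against an
56 | // already-built FSM provider; the horizon value is hypothetical.
57 | object PredictorRandomTaskDemo {
58 |   def demo(fsmp: FSMProvider): List[RandomInterface] =
59 |     PredictorRandomTask(fsmp, horizon = 10).execute()
60 | }
61 |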
--------------------------------------------------------------------------------
/cef/src/main/scala/model/forecaster/runtime/ForecasterRegistry.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster.runtime
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 |
5 | /**
6 | * The creation of forecaster runs follows the Prototype creational design pattern.
7 | * See https://sourcemaking.com/design_patterns/prototype.
8 | * For each pattern/FSM, we maintain a prototype.
9 | * Whenever we need to create a new forecaster run, we clone the prototype.
10 | */
11 | class ForecasterRegistry() extends LazyLogging {
12 |
13 | // For each FSM, we have a prototype. The key is the FSM id.
14 | private var prototypes = Map[Int, ForecasterPrototype]()
15 |
16 | /**
17 | * Adds a new prototype.
18 | *
19 | * @param pp The new prototype.
20 | */
21 | def addPrototype(pp: ForecasterPrototype): Unit = {
22 | val k = pp.getInterfaceId
23 | if (prototypes.contains(k)) {
24 | logger.error("There already exists a ForecasterPrototype with name: " + k)
25 | throw new IllegalArgumentException("There already exists a ForecasterPrototype with name: " + k)
26 | }
27 | prototypes += (k -> pp)
28 | }
29 |
30 | /**
31 | * Creates a forecaster run for a FSM by cloning the prototype.
32 | *
33 | * @param fsmId The id of the FSM.
34 | * @return The new predictor run.
35 | */
36 | def findAndClone(
37 | fsmId: Int,
38 | runId: Int
39 | ): ForecasterRun = {
40 | if (!prototypes.contains(fsmId)) {
41 | logger.error("ForecasterRegistry has no prototypes for: " + fsmId)
42 | throw new IllegalArgumentException("ForecasterRegistry has no prototypes for: " + fsmId)
43 | }
44 | val pp = prototypes(fsmId)
45 | pp.cloneForecaster(runId)
46 | }
47 |
48 | }
49 |
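50 | // Illustrative sketch (not part of the original file): the typical registry flow,
51 | // assuming a prototype built elsewhere; the run id is hypothetical.
52 | object ForecasterRegistryDemo {
53 |   def demo(pp: ForecasterPrototype): ForecasterRun = {
54 |     val registry = new ForecasterRegistry()
55 |     registry.addPrototype(pp) // register once per FSM
56 |     registry.findAndClone(pp.getInterfaceId, 1) // clone a fresh run on demand
57 |   }
58 | }
59 |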
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/provider/source/spstm/SPSTmSourceFromDSRA.scala:
--------------------------------------------------------------------------------
1 | package workflow.provider.source.spstm
2 |
3 | import stream.source.StreamSource
4 | import workflow.provider.DSRAProvider
5 | import workflow.provider.source.dsra.DSRASourceDirectI
6 | import workflow.provider.source.pst.{PSTSource, PSTSourceLearnerFromDSRA}
7 |
8 | object SPSTmSourceFromDSRA {
9 | def apply(
10 | dsrap: DSRAProvider,
11 | order: Int,
12 | trainStreamSource: StreamSource,
13 | pMin: Double,
14 | alpha: Double,
15 | gamma: Double,
16 | r: Double
17 | ): SPSTmSourceFromDSRA = {
18 | val pstSource = PSTSourceLearnerFromDSRA(dsrap, trainStreamSource, order, pMin, alpha, gamma, r)
19 | SPSTmSourceFromDSRA(dsrap, order, pstSource)
20 | }
21 |
22 | def apply(
23 | dsrap: DSRAProvider,
24 | order: Int,
25 | pstSource: PSTSource
26 | ): SPSTmSourceFromDSRA = {
27 | val dsrais = dsrap.provide()
28 | val dsrapDirectI = DSRAProvider(DSRASourceDirectI(dsrais))
29 | val size = dsrais.size
30 | val orders = List.fill(size) {
31 | order
32 | }
33 | new SPSTmSourceFromDSRA(dsrapDirectI, orders, pstSource)
34 | }
35 |
36 | def apply(
37 | dsrap: DSRAProvider,
38 | orders: List[Int],
39 | pstSource: PSTSource
40 | ): SPSTmSourceFromDSRA = new SPSTmSourceFromDSRA(dsrap, orders, pstSource)
41 | }
42 |
43 | class SPSTmSourceFromDSRA(
44 | val dsrap: DSRAProvider,
45 | val orders: List[Int],
46 | val pstSource: PSTSource
47 | ) extends SPSTmSource {
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/selection/TransformToStrict.scala:
--------------------------------------------------------------------------------
1 | package Specs.selection
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.symbolic.sfa.snfa.SNFAUtils
5 | import fsm.symbolic.sre._
6 | import org.junit.runner.RunWith
7 | import org.scalatest.FlatSpec
8 | import org.scalatestplus.junit.JUnitRunner
9 | import stream.GenericEvent
10 | import ui.ConfigUtils
11 | import utils.testing.{PatternGenerator, SymbolWordGenerator}
12 |
13 | @RunWith(classOf[JUnitRunner])
14 | class TransformToStrict extends FlatSpec with LazyLogging {
15 | "Applying strict contiguity to a formula " should " have no effect " in {
16 | testPatterns()
17 | }
18 |
19 | def testPatterns(): Unit = {
20 | logger.debug("test patterns")
21 | val patterns = PatternGenerator.generateRegExpPatterns(ConfigUtils.noOfPatterns, Set("A", "B", "C"), ConfigUtils.patternMaxDepth)
22 | val words = SymbolWordGenerator.generateStrings(Set("A", "B", "C"), ConfigUtils.wordMaxLength)
23 | for (p <- patterns) {
24 | val formula = SREUtils.re2formula(p)
25 | val strictFormula = SREUtils.applySelection(formula, SelectionStrategy.STRICT)
26 | logger.debug("Testing pattern/formula: " + p.toString + "/" + formula.toString)
27 | val snfa = SNFAUtils.buildSNFA(formula)
28 | val strictSnfa = SNFAUtils.buildSNFA(strictFormula)
29 | for (word <- words) {
30 | logger.debug("...with word " + word)
31 | val events = word.map(c => GenericEvent(c, 0))
32 | val snfaAccepts = snfa.accepts(events)
33 | val strictSnfaAccepts = strictSnfa.accepts(events)
34 | logger.debug("\t\t Result snfa/strictSnfa: " + snfaAccepts + "/" + strictSnfaAccepts)
35 | assert(snfaAccepts == strictSnfaAccepts)
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/classical/nfa/NFA2DFA.scala:
--------------------------------------------------------------------------------
1 | package Specs.classical.nfa
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.classical.fa.dfa.DFAUtils
5 | import fsm.classical.fa.nfa.NFAFactory
6 | import org.junit.runner.RunWith
7 | import org.scalatest.FlatSpec
8 | import org.scalatestplus.junit.JUnitRunner
9 | import ui.ConfigUtils
10 | import utils.testing.{PatternGenerator, SymbolWordGenerator}
11 |
12 | @RunWith(classOf[JUnitRunner])
13 | class NFA2DFA extends FlatSpec with LazyLogging {
14 | "epsilon-NFAs " should " be converted to equivalent DFAs and these DFAs should produce same results as AC DFAs " in {
15 | testPatterns()
16 | }
17 |
18 | def testPatterns(): Unit = {
19 | logger.debug("Testing with patterns and words.")
20 | val patterns = PatternGenerator.generateRegExpPatterns(
21 | ConfigUtils.noOfPatterns,
22 | Set("a", "b", "c"),
23 | ConfigUtils.patternMaxDepth
24 | )
25 | val words = SymbolWordGenerator.generateStrings(Set("a", "b", "c"), ConfigUtils.wordMaxLength)
26 | for (p <- patterns) {
27 | logger.debug("Testing epsilon-NFA to DFA (eq) for pattern: " + p.toString)
28 | for (word <- words) {
29 | logger.debug("...with word " + word)
30 | val nfap = NFAFactory.buildNFA(p, word.toSet)
31 | val elnfap = NFAFactory.buildEliminatedNFA(p, word.toSet)
32 | assert(elnfap.isDFAEquivalent)
33 | val dfap = DFAUtils.convertNfa2Dfa(elnfap)
34 | logger.debug("Pattern: " + p.toString + " Word: " + word + "/" + nfap.accepts(word) + "/" + elnfap.accepts(word) + "/" + dfap.accepts(word))
35 | assert(nfap.accepts(word) == dfap.accepts(word))
36 | assert(elnfap.accepts(word) == dfap.accepts(word))
37 | }
38 | }
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/Transition.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic
2 |
3 | import fsm.symbolic.TransitionOutput.TransitionOutput
4 | import fsm.symbolic.logic.Sentence
5 | import fsm.symbolic.sra.SRATransition
6 | import stream.GenericEvent
7 |
8 |
9 | /**
10 | * Abstract class representing automaton transitions.
11 | *
12 | * @param source The id of the source state.
13 | * @param target The id of the target state.
14 | * @param guard The guard that must evaluate to true for the transition to be triggered.
15 | * @param output The output emitted by the transition.
16 | */
17 | abstract class Transition(
18 | val source: Int,
19 | val target: Int,
20 | val guard: Guard,
21 | val output: TransitionOutput
22 | ) extends Serializable {
23 | /**
24 | * Evaluates the transition against an event.
25 | *
26 | * @param event The event to check.
27 | * @return True if the transition is triggered.
28 | */
29 | def enabled(event: GenericEvent): Boolean = guard.check(event)
30 |
31 | /**
32 | * Checks whether this is an epsilon transition.
33 | *
34 | * @return True if epsilon.
35 | */
36 | def isEpsilon: Boolean = guard.isEpsilon
37 |
38 | /**
39 | * Checks whether the transition is equipped with a given sentence.
40 | *
41 | * @param withSentence The given sentence.
42 | * @return True if the transition is equipped with the sentence.
43 | */
44 | def equipped(withSentence: Sentence): Boolean = guard.isSentence(withSentence)
45 |
46 | override def toString: String = source + "-->" + target + "\t(" + guard.sentence.toString + ")\t" + output
47 |
48 |
49 | def toStringSentence: String = guard.sentence.toString
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/cef/src/main/scala/utils/StringUtils.scala:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | /**
4 | * Some utils for handling strings.
5 | */
6 | object StringUtils {
7 |
8 | /**
9 |    * Converts a list to a string where each element is separated by a comma.
10 | *
11 | * @param args The list to be stringified.
12 | * @tparam T The type of list elements.
13 | * @return The stringified list.
14 | */
15 | def list2Str[T](args: List[T]): String = list2Str(args, "", ",")
16 |
17 | /**
18 | * Converts a list to a string where each element is separated by the given separator.
19 | *
20 | * @param args The list to be stringified.
21 | * @param separator The separator.
22 | * @tparam T The type of list elements.
23 | * @return The stringified list.
24 | */
25 | def list2Str[T](
26 | args: List[T],
27 | separator: String
28 | ): String = list2Str(args, "", separator)
29 |
30 | /**
31 | * Auxiliary recursive function for
32 | * utils.StringUtils#list2Str(scala.collection.immutable.List, java.lang.String, java.lang.String).
33 | *
34 | * @param args The list to be stringified.
35 | * @param currentString The string accumulator.
36 | * @param separator The separator.
37 | * @tparam T The type of list elements.
38 | * @return The stringified list.
39 | */
40 | @scala.annotation.tailrec
41 | def list2Str[T](
42 | args: List[T],
43 | currentString: String,
44 | separator: String
45 | ): String = {
46 | args match {
47 | case Nil => currentString
48 | case last :: Nil => currentString + last.toString
49 | case head :: tail => list2Str(tail, currentString + head.toString + separator, separator)
50 | }
51 | }
52 | }
53 |
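54 | // Illustrative sketch (not part of the original file): expected behavior of list2Str.
55 | object StringUtilsDemo {
56 |   def main(args: Array[String]): Unit = {
57 |     println(StringUtils.list2Str(List(1, 2, 3))) // 1,2,3
58 |     println(StringUtils.list2Str(List("a", "b"), "-")) // a-b
59 |     println(StringUtils.list2Str(Nil, "", ";")) // empty string
60 |   }
61 | }
62 |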
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/task/fsmTask/DSRATask.scala:
--------------------------------------------------------------------------------
1 | package workflow.task.fsmTask
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.DSRAInterface
5 | import workflow.provider.DSRAProvider
6 | import workflow.provider.source.dsra.DSRASourceFromSREM
7 | import workflow.task.Task
8 |
9 | object DSRATask {
10 | /**
11 | * Constructor for DSRA task.
12 | *
13 | * @param fn Path to file containing patterns
14 | * @param declarations Path to file containing declarations, i.e., extras (extra predicates to be taken into account)
15 | * and exclusives, i.e., sets of mutually exclusive predicates
16 | * @return The DSRA task.
17 | */
18 | def apply(
19 | fn: String,
20 | declarations: String
21 | ): DSRATask = new DSRATask(fn, declarations)
22 | }
23 |
24 | /**
25 | * Builds DSRA provider from a set of patterns.
26 | *
27 | * @param fn Path to file containing patterns
28 | * @param declarations Path to file containing declarations, i.e., extras (extra predicates to be taken into account)
29 | * and exclusives, i.e., sets of mutually exclusive predicates
30 | */
31 | class DSRATask private (
32 | fn: String,
33 | declarations: String
34 | ) extends Task with LazyLogging {
35 |
36 | /**
37 | * Executes the task.
38 | *
39 | * @return A list of DSRA(s) (interfaces), one for each pattern.
40 | */
41 | override def execute(): List[DSRAInterface] = {
42 | logger.info("Executing DSRA task...")
43 | val dsrap = DSRAProvider(new DSRASourceFromSREM(fn, declarations))
44 | val dsra = dsrap.provide()
45 | logger.debug("DSRAs built.")
46 | logger.info("DSRA task done.")
47 | dsra
48 | }
49 | }
50 |
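51 | // Illustrative sketch (not part of the original file): the file paths below are
52 | // hypothetical placeholders for a SREM pattern file and its declarations file.
53 | object DSRATaskDemo {
54 |   def main(args: Array[String]): Unit = {
55 |     val task = DSRATask("path/to/patterns.sre", "path/to/declarations.sre")
56 |     val dsras = task.execute() // one DSRA interface per pattern in the file
57 |     println(dsras.size)
58 |   }
59 | }
60 |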
--------------------------------------------------------------------------------
/cef/src/main/scala/model/vmm/pst/spsa/SPSATransition.scala:
--------------------------------------------------------------------------------
1 | package model.vmm.pst.spsa
2 |
3 | import model.vmm.Symbol
4 |
5 | object SPSATransition {
6 | /**
7 | * Constructor for SPSA transitions.
8 | *
9 | * @param target The target state.
10 | * @param withSymbol The transition symbol.
11 | * @param prob The transition probability.
12 | * @return A SPSA transition.
13 | */
14 | def apply(
15 | target: SPSAState,
16 | withSymbol: Symbol,
17 | prob: Double
18 | ): SPSATransition = new SPSATransition(target, withSymbol, prob)
19 | }
20 |
21 | /**
22 | * Represents a SPSA transition. Each transition has a symbol and a probability.
23 | * The source state is not given, because transitions are always created from within a SPSA state and the source
24 | * state is thus implicitly known.
25 | * As target state, we directly pass the object of that state and not the target state's id.
26 | * CAUTION: this might create problems during serialization due to infinite recursion.
27 | *
28 | * TODO: maybe target should be the state's identifier and not the state itself due to overflows during serialization
29 | *
30 | * @param target The target state.
31 | * @param withSymbol The transition symbol.
32 | * @param prob The transition probability.
33 | */
34 | class SPSATransition private[spsa] (
35 | val target: SPSAState,
36 | val withSymbol: Symbol,
37 | val prob: Double
38 | ) extends Serializable {
39 |
40 | override def toString: String =
41 | "SYMBOL: " + withSymbol +
42 | "\tTARGET: (" + target.sfaId + "->" + target.psaLabel + ")" +
43 | "\tPROB: " + prob
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/cef/src/main/scala/estimator/HMMEstimator/FSMStateRun.scala:
--------------------------------------------------------------------------------
1 | package estimator.HMMEstimator
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import estimator.RunEstimatorEngine
5 | import fsm.{FSMInterface, SDFAInterface}
6 | import stream.source.StreamSource
7 |
8 | object FSMStateRun {
9 | /**
10 | * Constructor for FSM state run.
11 | *
12 | * @param fsm The FSM for which a HMM is to be trained.
13 | * @return The run.
14 | */
15 | def apply(fsm: FSMInterface): FSMStateRun =
16 | fsm match {
17 | case _: SDFAInterface => new FSMStateRun(fsm.asInstanceOf[SDFAInterface])
18 | case _ => throw new IllegalArgumentException("FSMStateRun accepts only SDFAInterface")
19 | }
20 |
21 | }
22 |
23 | /**
24 | * A FSMStateRun is responsible for running estimation of a HMM for a FSM.
25 | *
26 | * @param fsm The FSM for which a HMM is to be trained.
27 | */
28 | class FSMStateRun private (fsm: SDFAInterface) extends LazyLogging {
29 |
30 | /**
31 | * Method called to actually run estimation. For the same FSM, it can be called multiple times for different
32 | * training streams, if required.
33 | *
34 | * @param streamSource Source for training stream.
35 | * @return The estimator holding the HMMs.
36 | */
37 | def estimateHMM(streamSource: StreamSource): FSMStateEstimator = {
38 | val learner = FSMStateEstimator(fsm)
39 | run(learner, streamSource)
40 | learner.estimate()
41 | learner
42 | }
43 |
44 | private def run(
45 | learner: FSMStateEstimator,
46 | streamSource: StreamSource
47 | ): FSMStateEstimator = {
48 | logger.info("Running HMM estimation...")
49 | val hmme = RunEstimatorEngine(fsm, learner)
50 | streamSource.emitEventsToListener(hmme)
51 | learner
52 | }
53 | }
54 |
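55 | // Illustrative sketch (not part of the original file): fsm must wrap an SDFA
56 | // (the constructor rejects anything else); the training stream is assumed given.
57 | object FSMStateRunDemo {
58 |   def demo(fsm: FSMInterface, trainStream: StreamSource): FSMStateEstimator =
59 |     FSMStateRun(fsm).estimateHMM(trainStream)
60 | }
61 |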
--------------------------------------------------------------------------------
/cef/src/main/scala/model/vmm/pst/BufferBank.scala:
--------------------------------------------------------------------------------
1 | package model.vmm.pst
2 |
3 | import model.vmm.Symbol
4 |
5 | /**
6 | * A buffer bank is just a collection of cyclic buffers. All buffers are of the same size. For each value of a
7 | * partition attribute, we maintain a cyclic buffer. If the buffer bank does not contain a buffer for a certain
8 | * partition value, a new buffer is created.
9 | *
10 |  * @param maxOrder The size of each cyclic buffer, i.e., the maximum order of the VMM.
11 | */
12 | class BufferBank(maxOrder: Int) {
13 | private var bank: Map[String, CyclicBuffer] = Map.empty
14 |
15 | /**
16 | * Pushes a new symbol to the buffer of the given partition value.
17 | *
18 | * @param partitionId The partition value.
19 | * @param symbol The new symbol.
20 | */
21 | def push(
22 | partitionId: String,
23 | symbol: Symbol
24 | ): Unit = {
25 | if (bank.contains(partitionId)) bank(partitionId).pushSymbol(symbol)
26 | else {
27 | // if a buffer for the given partition value does not exist, create it
28 | val newBuffer = new CyclicBuffer(maxOrder)
29 | newBuffer.pushSymbol(symbol)
30 | bank += (partitionId -> newBuffer)
31 | }
32 | }
33 |
34 | /**
35 | * Pops the contents of the buffer for a given partition value.
36 | *
37 | * @param partitionId The partition value.
38 | * @return The buffer's contents, as a list of symbols, starting from the most recently added symbol.
39 | */
40 | def pop(partitionId: String): List[Symbol] = {
41 | require(bank.contains(partitionId))
42 | bank(partitionId).pop
43 | }
44 |
45 | /**
46 | * Clears the buffer for the given partition value.
47 | *
48 | * @param partitionId The given partition value.
49 | */
50 | def clear(partitionId: String): Unit = if (bank.contains(partitionId)) bank(partitionId).clear()
51 |
52 |
53 | }
54 |
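55 | // Illustrative sketch (not part of the original file): one buffer per partition
56 | // value, created on demand; pop returns the most recent symbol first.
57 | object BufferBankDemo {
58 |   def main(args: Array[String]): Unit = {
59 |     val bank = new BufferBank(3)
60 |     bank.push("vessel-1", Symbol(0))
61 |     bank.push("vessel-1", Symbol(1))
62 |     bank.push("vessel-2", Symbol(2)) // a separate buffer is created here
63 |     println(bank.pop("vessel-1")) // the two symbols of vessel-1, newest first
64 |   }
65 | }
66 |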
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/task/predictorTask/PredictorNextTask.scala:
--------------------------------------------------------------------------------
1 | package workflow.task.predictorTask
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import model.forecaster.NextInterface
5 | import model.forecaster.next.NextForecasterBuilder
6 | import workflow.provider.{FSMProvider, MarkovChainProvider}
7 | import workflow.task.Task
8 |
9 | object PredictorNextTask {
10 | /**
11 | * Constructor for next predictor task.
12 | *
13 | * @param fsmp The provider for the FSMs.
14 | * @param mcp The provider for the Markov chains of the FSMs.
15 | * @return A next predictor task.
16 | */
17 | def apply(
18 | fsmp: FSMProvider,
19 | mcp: MarkovChainProvider
20 | ): PredictorNextTask = new PredictorNextTask(fsmp, mcp)
21 | }
22 |
23 | /**
24 | * Creates next predictors for a list of FSMs.
25 | *
26 | * @param fsmp The provider for the FSMs.
27 | * @param mcp The provider for the Markov chains of the FSMs.
28 | */
29 | class PredictorNextTask private (
30 | fsmp: FSMProvider,
31 | mcp: MarkovChainProvider
32 | ) extends Task with LazyLogging {
33 |
34 | /**
35 | * Executes the task.
36 | *
37 | * @return A list of next predictor interfaces.
38 | */
39 | override def execute(): List[NextInterface] = {
40 | logger.info("Executing predictor (next) task...")
41 | val fsmList = fsmp.provide()
42 | val mcList = mcp.provide()
43 | val fmList = fsmList.zip(mcList)
44 | val buildersList = fmList.map(fm => (fm._1.getId, NextForecasterBuilder(fm._1, fm._2)))
45 | val npp = buildersList.map(npb => (npb._1, npb._2.createForecaster()))
46 | val nis = npp.map(np => NextInterface(np._1, np._2))
47 | logger.info("done.")
48 | nis
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/array/archived/Generator.scala:
--------------------------------------------------------------------------------
1 | package stream.array.archived
2 |
3 | import stream.GenericEvent
4 | import stream.array.{EventStream, XMLParser}
5 |
6 | import scala.collection.mutable.Map
7 | import scala.util.control.Breaks._
8 |
9 | class Generator {
10 | private var filename = ""
11 | private val eventStream = new EventStream()
12 | private var cumulativeProbs = Map.empty[String, Double]
13 | private var probs = Map.empty[String, Double]
14 | private var size = 0
15 | private var seed = 100
16 |
17 | def setFilename(fn: String): Unit = { filename = fn }
18 |
19 | def setSeed(s: Int): Unit = { seed = s }
20 |
21 | def parseStreamFile(): Unit = {
22 | val fp = new XMLParser(filename)
23 | size = fp.getSize
24 | var cumProb = 0.0
25 | for ((k, v) <- fp.getProbs) {
26 | cumProb += v
27 | cumulativeProbs += (k -> cumProb)
28 | }
29 | probs = fp.getProbs
30 | }
31 |
32 | def generateStream(): EventStream = {
33 | eventStream.clearStream()
34 | val r = new scala.util.Random(seed)
35 | var p = 0
36 | for (i <- 1 to size) {
37 | p = r.nextInt(100) + 1
38 | breakable {
39 | for ((k, v) <- cumulativeProbs) {
40 | if (p <= v) {
41 | eventStream.addEvent(GenericEvent(k.toString, i))
42 | break
43 | }
44 | }
45 | }
46 | }
47 | eventStream
48 | }
49 |
50 | def generateStream(el: List[Char]): EventStream = {
51 | eventStream.clearStream()
52 | var i = 0
53 | for (e <- el) {
54 | i += 1
55 | eventStream.addEvent(GenericEvent(e.toString, i))
56 | }
57 | eventStream
58 | }
59 |
60 | def getStream: EventStream = eventStream
61 |
62 | def getCumulativeProbs: Map[String, Double] = cumulativeProbs
63 |
64 | def getProbs: Map[String, Double] = probs
65 | }
66 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/BooleanPermutator.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic
2 |
3 | /**
4 | * Creates all permutations of the Boolean values TRUE/FALSE of length up to MaxLength.
5 |  * fsm.symbolic.logic.BooleanPermutator#permutations() is a map. Each key is the length and each value a set with
6 | * all permutations of length equal to the key.
7 | *
8 |  * NOTE: This used to be a singleton so that permutations could be created incrementally, as needed. However, a
9 |  * singleton is shared by all threads, which can lead to problems when running tests in parallel.
10 | *
11 | * @param maxLength The maximum length of the permutations.
12 | */
13 | class BooleanPermutator(var maxLength: Int) {
14 | private val truthValues = Set(true, false)
15 | private var permutations: Map[Int, Set[List[Boolean]]] = utils.SetUtils.permutationsAlt(truthValues, maxLength)
16 |
17 | /**
18 | * Retrieves all permutations of a certain given length.
19 | *
20 | * @param length The given length.
21 | * @return The permutations.
22 | */
23 | def getPermutations(length: Int): Set[List[Boolean]] = permutations(length)
24 |
25 | /**
26 | * Retrieves all permutations up to a certain given length.
27 | *
28 | * @param length The given length.
29 | * @return The permutations.
30 | */
31 | def getPermutationsUpTo(length: Int): Map[Int, Set[List[Boolean]]] = permutations.filter(p => p._1 <= length)
32 |
33 | /**
34 | * Expands permutations to a greater length, i.e., creates all extra permutations up to a new greater length.
35 | *
36 | * @param newLength the new length.
37 | */
38 | private def expand(newLength: Int): Unit = {
39 | require(newLength > maxLength)
40 | permutations = utils.SetUtils.permutationsAux1(truthValues, newLength, maxLength, permutations)
41 | maxLength = newLength
42 | }
43 |
44 | }
45 |
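46 | // Illustrative sketch (not part of the original file): all Boolean permutations of
47 | // length 2, i.e., the four lists TT, TF, FT, FF.
48 | object BooleanPermutatorDemo {
49 |   def main(args: Array[String]): Unit = {
50 |     val permutator = new BooleanPermutator(2)
51 |     println(permutator.getPermutations(2))
52 |   }
53 | }
54 |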
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/logic/Assignment.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.logic
2 |
3 | /**
4 | * A class that holds the truth values for a list of predicates, encoded as a map from predicates to their truth
5 | * values.
6 | *
7 | * @param assign a list of Tuple2 with predicates and their truth values.
8 | */
9 | case class Assignment(assign: List[(Predicate, Boolean)]) {
10 | private val assMap: Map[Predicate, Boolean] = assign.toMap
11 | def getValueOf(p: Predicate): Boolean = {
12 | require(assMap.contains(p))
13 | assMap(p)
14 | }
15 |
16 | def contains(p: Predicate): Boolean = assMap.contains(p)
17 |
18 | /**
19 |    * Checks whether a set of supposedly mutually exclusive predicates is consistent with this assignment, i.e.,
20 |    * whether at most one of them is true under this assignment.
21 | *
22 | * @param exclusive A set of predicates to be checked whether they are mutually exclusive.
23 | * @return True if the predicates are indeed exclusive for this assignment.
24 | */
25 | def isValid(exclusive: Set[Predicate]): Boolean = {
26 | val filtered = assign.filter(a => exclusive.contains(a._1))
27 | val exclusiveAndTrue = filtered.filter(a => a._2)
28 | (exclusiveAndTrue.size <= 1) // at most one of the exclusive predicates can be true
29 | }
30 |
31 | /**
32 | * Same as fsm.symbolic.sfa.logic.Assignment#isValid(scala.collection.immutable.Set) above, but checks multiple sets
33 | * of exclusives.
34 | * @param exclusives A set of possible exclusives to be checked.
35 | * @return True if all sets of possible exclusives are indeed exclusive.
36 | */
37 | def isValidSet(exclusives: Set[Set[Predicate]]): Boolean = {
38 | exclusives.forall(exclusive => isValid(exclusive))
39 | }
40 |
41 | override def toString: String = {
42 | val str = utils.StringUtils.list2Str(assign, "\t")
43 | str
44 | }
45 | }
46 |
47 |
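48 | // Illustrative sketch (not part of the original file): with exclusives {A, B},
49 | // an assignment where both predicates hold is invalid.
50 | object AssignmentDemo {
51 |   def main(args: Array[String]): Unit = {
52 |     val pa = PredicateConstructor.getEventTypePred("A")
53 |     val pb = PredicateConstructor.getEventTypePred("B")
54 |     val bothTrue = Assignment(List((pa, true), (pb, true)))
55 |     val oneTrue = Assignment(List((pa, true), (pb, false)))
56 |     println(bothTrue.isValid(Set(pa, pb))) // false: two exclusive predicates are true
57 |     println(oneTrue.isValid(Set(pa, pb))) // true: at most one is true
58 |   }
59 | }
60 |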
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/symbolic/snfa/SNFAeqNEGSNFA.scala:
--------------------------------------------------------------------------------
1 | package Specs.symbolic.snfa
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.symbolic.sfa.snfa.SNFAUtils
5 | import fsm.symbolic.sre.{SREOperator, SREUtils}
6 | import org.junit.runner.RunWith
7 | import org.scalatest.FlatSpec
8 | import org.scalatestplus.junit.JUnitRunner
9 | import stream.GenericEvent
10 | import ui.ConfigUtils
11 | import utils.testing.{PatternGenerator, SymbolWordGenerator}
12 |
13 | @RunWith(classOf[JUnitRunner])
14 | class SNFAeqNEGSNFA extends FlatSpec with LazyLogging {
15 |
16 | "SNFA " should " accept the complement of words accepted by negated SNFA " in {
17 | testPatterns()
18 | }
19 |
20 | def testPatterns(): Unit = {
21 | logger.debug("test patterns")
22 | val patterns = PatternGenerator.generateRegExpPatterns(
23 | ConfigUtils.noOfPatterns,
24 | Set("A", "B", "C"),
25 | ConfigUtils.patternMaxDepth
26 | )
27 | val words = SymbolWordGenerator.generateStrings(Set("A", "B", "C"), ConfigUtils.wordMaxLength)
28 | for (p <- patterns) {
29 | val formula = SREUtils.re2formula(p)
30 | val negFormula = SREOperator(fsm.symbolic.sre.RegularOperator.NEG, List(formula))
31 | logger.debug("Testing with...")
32 | logger.debug("...original formula: " + formula)
33 | logger.debug("...negated formula: " + negFormula)
34 | val snfa = SNFAUtils.buildSNFA(formula)
35 | val negSnfa = SNFAUtils.buildSNFA(negFormula)
36 | for (w <- words) {
37 | val event = w.map(c => GenericEvent(c, 0))
38 | val snfaAccepts = snfa.accepts(event)
39 | val negSnfaAccepts = negSnfa.accepts(event)
40 | logger.debug("word " + w)
41 | logger.debug("SNFA:" + snfaAccepts)
42 | logger.debug("Negated SNFA" + negSnfaAccepts)
43 | assert(snfaAccepts == !negSnfaAccepts)
44 | }
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/cef/src/main/scala/stream/array/ProbMapStream.scala:
--------------------------------------------------------------------------------
1 | package stream.array
2 |
3 | import stream.GenericEvent
4 | import scala.collection.mutable
5 | import scala.util.control.Breaks._
6 |
7 | /**
8 | * Creates a random event stream from a set of event type probabilities.
9 | *
10 | * @param size The size of the stream.
11 | * @param p The event type probabilities.
12 | * @param s The seed for the random generator.
13 | */
14 | class ProbMapStream private[stream] (
15 | size: Int,
16 | p: mutable.Map[String, Double],
17 | s: Int
18 | ) extends EventStreamI {
19 | require(size > 0 & size <= 10000000)
20 | private var cumulativeProbs = mutable.Map.empty[String, Double]
21 |   require(checkProbs(p)) // note: checkProbs also fills in cumulativeProbs as a side effect
22 | val probs: mutable.Map[String, Double] = p
23 | private val seed = s
24 |
25 | /**
26 | * @return The event stream.
27 | */
28 | def generateStream(): EventStream = {
29 | val eventStream = new EventStream()
30 | val r = new scala.util.Random(seed)
31 | var p = 0.0
32 | for (i <- 1 to size) {
33 | p = r.nextDouble()
34 | breakable {
35 | for ((k, v) <- cumulativeProbs) {
36 | if (p <= v) {
37 | eventStream.addEvent(GenericEvent(k, i))
38 | break
39 | }
40 | }
41 | }
42 | }
43 | eventStream.setEventTypes(probs.keys.toSet)
44 | eventStream.setProbs(probs)
45 | eventStream.setCumulativeProbs(cumulativeProbs)
46 | eventStream
47 | }
48 |
49 | private def checkProbs(p: mutable.Map[String, Double]): Boolean = {
50 | var totalProb = 0.0
51 | for ((k, v) <- p) {
52 |       if (v < 0.0 || v > 1.0) return false
53 | totalProb += v
54 | if (totalProb > 1.0) return false
55 | cumulativeProbs += (k -> totalProb)
56 | }
57 | true
58 | }
59 |
60 | }
61 |
--------------------------------------------------------------------------------
/cef/src/main/scala/utils/MathUtils.scala:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import breeze.stats.distributions.Uniform
4 |
5 | import scala.math.log10
6 |
7 | object MathUtils {
8 | /**
9 | * Estimates logarithm of a number with a given base.
10 | *
11 | * @param x The number.
12 | * @param base The base.
13 | * @return The logarithm.
14 | */
15 | def logbase(
16 | x: Double,
17 | base: Double
18 | ): Double = log10(x) / log10(base)
19 |
20 | /**
21 |    * Uniformly samples a double from the interval [0, max].
22 |    *
23 |    * @param max The upper limit of the interval.
24 |    * @return The sample.
25 | */
26 | def sampleUniform(max: Int): Double = {
27 | val uni = new Uniform(0, max)
28 | val s = uni.sample()
29 | s
30 | }
31 |
32 | /**
33 |    * Uniformly samples an integer from the interval [0, max].
34 |    *
35 |    * @param max The upper limit of the interval.
36 |    * @return The sample.
37 | */
38 | def sampleIntUniform(max: Int): Int = sampleUniform(max).toInt
39 |
40 | /**
41 | * Performs simple linear regression with least squares.
42 | *
43 | * @param x The vector of x values.
44 | * @param y The vector of y values.
45 | * @return The slope and intercept of the resulting line.
46 | */
47 | def slopeInterceptLinearLeastSquares(
48 | x: List[Double],
49 | y: List[Double]
50 | ): (Double,Double) = {
51 |     require(x.nonEmpty && x.size == y.size)
52 | val n = x.size
53 | val x2 = x.map(e => e * e)
54 | val xy = x.zip(y).map(e => (e._1 * e._2))
55 | val sumx = x.sum
56 | val sumy = y.sum
57 | val sumx2 = x2.sum
58 | val sumxy = xy.sum
59 | val slope = ((n*sumxy) - (sumx * sumy)) / ((n*sumx2) - (sumx * sumx))
60 | val intercept = (sumy - (slope * sumx)) / n
61 | (slope, intercept)
62 | }
63 | }
64 |
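65 | // Illustrative sketch (not part of the original file): points on y = 2x + 1
66 | // recover slope 2 and intercept 1; logbase(8, 2) is 3.
67 | object MathUtilsDemo {
68 |   def main(args: Array[String]): Unit = {
69 |     println(MathUtils.logbase(8.0, 2.0)) // 3.0, up to floating-point error
70 |     val (slope, intercept) =
71 |       MathUtils.slopeInterceptLinearLeastSquares(List(0.0, 1.0, 2.0), List(1.0, 3.0, 5.0))
72 |     println((slope, intercept)) // (2.0, 1.0)
73 |   }
74 | }
75 |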
--------------------------------------------------------------------------------
/cef/src/main/scala/model/vmm/pst/psa/PSAMatrix.scala:
--------------------------------------------------------------------------------
1 | package model.vmm.pst.psa
2 |
3 | import breeze.linalg.eig.DenseEig
4 | import breeze.linalg.{DenseMatrix, DenseVector, eig}
5 | import model.vmm.SymbolWord
6 |
7 | object PSAMatrix {
8 | /**
9 | * Constructor for PSA transition matrices.
10 | *
11 | * @param matrix The matrix.
12 | * @param state2row A map from PSA state labels to Markov chain state ids.
13 | * @param row2state A map from Markov chain state ids to PSA state labels.
14 | * @return A PSA matrix.
15 | */
16 | def apply(
17 | matrix: DenseMatrix[Double],
18 | state2row: Map[SymbolWord, Int],
19 | row2state: Map[Int, SymbolWord]
20 | ): PSAMatrix = new PSAMatrix(matrix, state2row, row2state)
21 | }
22 |
23 | /**
24 | * Transition matrix of a PSA.
25 | *
26 | * @param matrix The matrix.
27 | * @param state2row A map from PSA state labels to Markov chain state ids.
28 | * @param row2state A map from Markov chain state ids to PSA state labels.
29 | */
30 | class PSAMatrix(
31 | matrix: DenseMatrix[Double],
32 | state2row: Map[SymbolWord, Int],
33 | row2state: Map[Int, SymbolWord]
34 | ) {
35 | // some results about the eigenvalues and eigenvectors of the matrix
36 | // could be interesting in order to study the limiting behavior of the matrix, e.g., for long-term forecasting
37 | val eigen: DenseEig = eig(matrix)
38 | val eigenValues: DenseVector[Double] = eigen.eigenvalues
39 | val eigenValuesComplex: DenseVector[Double] = eigen.eigenvaluesComplex
40 | val eigenVectors: DenseMatrix[Double] = eigen.eigenvectors
41 |
42 | override def toString: String = {
43 | "MATRIX\n" + matrix.toString() +
44 | "\nEigenValues\n" + eigenValues +
45 | "\nEigenValuesComplex\n" + eigenValuesComplex +
46 | "\nEigenVectors\n" + eigenVectors +
47 | "\nRow2State\n" + row2state +
48 | "\nState2Row\n" + state2row
49 | }
50 | }
51 |
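52 | // Illustrative sketch (not part of the original file): a row-stochastic matrix
53 | // always has a unit eigenvalue, which governs its limiting behavior.
54 | object PSAMatrixDemo {
55 |   def main(args: Array[String]): Unit = {
56 |     val m = DenseMatrix((0.9, 0.1), (0.5, 0.5))
57 |     println(eig(m).eigenvalues) // contains 1.0; the other eigenvalue is 0.4
58 |   }
59 | }
60 |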
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/misc/PermutationsSpec.scala:
--------------------------------------------------------------------------------
1 | package Specs.misc
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.symbolic.logic.BooleanPermutator
5 | import org.junit.runner.RunWith
6 | import org.scalatest.FlatSpec
7 | import org.scalatestplus.junit.JUnitRunner
8 |
9 | @RunWith(classOf[JUnitRunner])
10 | class PermutationsSpec extends FlatSpec with LazyLogging {
11 | "Permutations constructed in different ways " should " still be the same" in {
12 | val elements = Set(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
13 | val maxLength = 5
14 | var permsTrad = Map[Int, Set[List[Int]]]()
15 |
16 | var t1 = System.nanoTime()
17 | for (i <- 1 to maxLength) {
18 | val perms = utils.SetUtils.permutations(elements, i)
19 | permsTrad = permsTrad + (i -> perms)
20 | }
21 | var t2 = System.nanoTime()
22 | logger.debug("perms time: " + (t2 - t1) / 1000000.0)
23 |
24 | t1 = System.nanoTime()
25 | val permsAlt = utils.SetUtils.permutationsAlt(elements, maxLength)
26 | t2 = System.nanoTime()
27 | logger.debug("perms alt time: " + (t2 - t1) / 1000000.0)
28 |
29 | checkPerms(permsTrad, permsAlt)
30 |
31 | val truthValues = Set(true, false)
32 | val permsAltBool = utils.SetUtils.permutationsAlt(truthValues, maxLength)
33 | val boolPerm = new BooleanPermutator(maxLength)
34 | val permsPermutatorBool = boolPerm.getPermutationsUpTo(maxLength)
35 | logger.debug("perms alt bool:" + permsAltBool)
36 | logger.debug("perms perm bool:" + permsPermutatorBool)
37 | checkPerms(permsAltBool, permsPermutatorBool)
38 | }
39 |
40 | private def checkPerms[T](
41 | perms1: Map[Int, Set[List[T]]],
42 | perms2: Map[Int, Set[List[T]]]
43 | ): Unit = {
44 | assert(perms1.keySet == perms2.keySet)
45 | val it = perms1.keySet.iterator
46 | while (it.hasNext) {
47 | val i = it.next()
48 | assert(perms1(i) == perms2(i))
49 | }
50 | }
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/symbolic/sdfa/SDFADistances.scala:
--------------------------------------------------------------------------------
1 | package Specs.symbolic.sdfa
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.SDFAInterface
5 | import fsm.symbolic.logic.PredicateConstructor
6 | import fsm.symbolic.sfa.SFAUtils
7 | import fsm.symbolic.sfa.snfa.SNFAUtils
8 | import fsm.symbolic.sre.SREUtils
9 | import org.junit.runner.RunWith
10 | import org.scalatest.FlatSpec
11 | import org.scalatestplus.junit.JUnitRunner
12 | import ui.ConfigUtils
13 | import utils.testing.PatternGenerator
14 |
15 | @RunWith(classOf[JUnitRunner])
16 | class SDFADistances extends FlatSpec with LazyLogging {
17 | "Optimized calculation of shortest walk distances" should "be the same as exhaustive calculations" in {
18 | testPatterns()
19 | }
20 |
21 | def testPatterns(): Unit = {
22 | logger.debug("test patterns")
23 | val patterns = PatternGenerator.generateRegExpPatterns(
24 | ConfigUtils.noOfPatterns,
25 | Set("A", "B", "C"),
26 | ConfigUtils.patternMaxDepth
27 | )
28 | val pa = PredicateConstructor.getEventTypePred("A")
29 | val pb = PredicateConstructor.getEventTypePred("B")
30 | val pc = PredicateConstructor.getEventTypePred("C")
31 | val exclusive = Set(pa, pb, pc)
32 | for (p <- patterns) {
33 | val formula = SREUtils.re2formula(p)
34 | logger.debug("Testing distances for pattern/formula: " + p.toString + "/" + formula.toString)
35 | val snfa = SNFAUtils.buildSNFA(formula)
36 | val sdfa = SFAUtils.determinizeI(snfa, Set(exclusive), Set.empty)
37 | val sdfai = SDFAInterface(sdfa)
38 | sdfai.estimateRemainingPercentage
39 | val optDistances = sdfai.shortestPathDistances
40 | logger.debug("SDFA: " + sdfa.toString)
41 | logger.debug("Optimized distances: " + optDistances.toString())
42 | val pathDistances = sdfai.findShortestPathDistancesPaths
43 | logger.debug("Exhaustive distances: " + pathDistances.toString())
44 | assert(optDistances == pathDistances)
45 | }
46 | }
47 |
48 | }
49 |
--------------------------------------------------------------------------------
/cef/src/main/scala/model/forecaster/wt/WtForecaster.scala:
--------------------------------------------------------------------------------
1 | package model.forecaster.wt
2 |
3 | import model.forecaster.ForecasterType.ForecasterType
4 | import model.forecaster.runtime.Forecast
5 |
6 | object WtForecaster {
7 | /**
8 | * Constructor for a waiting-time predictor.
9 | *
10 | * @param forecastsTable The table holding a forecast interval for each emitting state.
11 | * @param forecasterType REGRESSION or CLASSIFICATION.
12 | * @return The waiting-time predictor.
13 | */
14 | private[wt] def apply(
15 | forecastsTable: Map[Int, Forecast],
16 | forecasterType: ForecasterType
17 | ): WtForecaster =
18 | new WtForecaster(forecastsTable, forecasterType)
19 | }
20 |
21 | /**
22 | * Predictor whose forecasts should be derived from waiting-time distributions.
23 | *
24 | * @param forecastsTable The table holding a forecast interval for each emitting state.
25 | * @param forecasterType REGRESSION or CLASSIFICATION.
26 | */
27 | class WtForecaster private(
28 | val forecastsTable: Map[Int, Forecast],
29 | val forecasterType: ForecasterType
30 | ) extends Serializable {
31 |
32 | /**
33 | * Retrieves the forecast interval from the table for a given state.
34 | *
35 | * @param state The id of the given state.
36 | * @return The forecast interval.
37 | */
38 | def getForecastFromTable(state: Int): Forecast = {
39 | val tablePred = forecastsTable(state)
40 | if (tablePred.start != -1) Forecast(
41 | start = tablePred.start,
42 | end = tablePred.end,
43 | middle = tablePred.middle,
44 | prob = tablePred.prob,
45 | positive = tablePred.positive
46 | )
47 | else Forecast()
48 | }
49 |
50 | /**
51 | * @return The ids of all the states for which we have entries in the predictions table.
52 | */
53 | def getStates: Set[Int] = forecastsTable.keySet
54 | }
55 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/SFA.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.symbolic.Automaton
5 | import fsm.symbolic.TransitionOutput.TransitionOutput
6 | import fsm.symbolic.sfa.sdfa.SDFA
7 | import fsm.symbolic.sra.Configuration
8 | import stream.GenericEvent
9 |
10 | /**
11 | * Abstract class representing symbolic automata.
12 | *
13 | * @param states The states of the automaton as a map of IDs to states.
14 | * @param transitions The list of transitions.
15 | * @param start The id of the start state.
16 | * @param finals The set of IDs of the final states.
17 | */
18 | abstract class SFA(
19 | states: Map[Int, SFAState],
20 | transitions: List[SFATransition],
21 | start: Int,
22 | finals: Set[Int]
23 | ) extends Automaton(states, transitions, start, finals) with Serializable with LazyLogging {
24 | require(start >= 0)
25 | require(states.keySet.contains(start))
26 | require(finals.forall(f => states.keySet.contains(f)))
27 |
28 |
29 | /**
30 | * Finds the state(s) we can reach from a given state with a given event.
31 | *
32 | * @param fromStateId The id of the given state.
33 | * @param withEvent The given event.
34 | * @return The state(s) reached.
35 | */
36 | override def getDelta(
37 | fromStateId: Int,
38 | withEvent: GenericEvent
39 | ): Set[Configuration] = {
40 | val result = super.getDelta(fromStateId, withEvent)
41 |     if (this.isInstanceOf[SDFA] && result.size != 1) {
42 | logger.error("Delta for SDFA should always return single state.")
43 | throw new Error("Delta for SDFA should always return single state.")
44 | }
45 | result
46 | }
47 |
48 | override def getDeltaNoConfArray(fromStateId: Int, withEvent: GenericEvent): List[(Int, TransitionOutput)] =
49 | super.getDeltaNoConfArray(fromStateId, withEvent)
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/symbolic/snfa/NFAEqSNFA.scala:
--------------------------------------------------------------------------------
1 | package Specs.symbolic.snfa
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.classical.fa.nfa.NFAFactory
5 | import fsm.symbolic.sre.SREUtils
6 | import fsm.symbolic.sfa.snfa.SNFAUtils
7 | import org.junit.runner.RunWith
8 | import org.scalatest.FlatSpec
9 | import org.scalatestplus.junit.JUnitRunner
10 | import stream.GenericEvent
11 | import ui.ConfigUtils
12 | import utils.testing.{PatternGenerator, SymbolWordGenerator}
13 |
14 | @RunWith(classOf[JUnitRunner])
15 | class NFAEqSNFA extends FlatSpec with LazyLogging {
16 | "SNFA" should "accept same words as corresponding NFA" in {
17 | testPatterns()
18 | }
19 |
20 | def testPatterns(): Unit = {
21 | logger.debug("\n\n\t testPatterns\n\n")
22 | val patterns = PatternGenerator.generateRegExpPatterns(
23 | ConfigUtils.noOfPatterns,
24 | Set("A", "B", "C"),
25 | ConfigUtils.patternMaxDepth
26 | )
27 | val words = SymbolWordGenerator.generateStrings(Set("A", "B", "C"), ConfigUtils.wordMaxLength)
28 | for (p <- patterns) {
29 | val formula = SREUtils.re2formula(p)
30 | logger.debug("Testing NFA-SNFA equivalence for pattern/formula: " + p.toString + "/" + formula.toString)
31 | val snfa = SNFAUtils.buildSNFA(formula)
32 | val elsnfa = SNFAUtils.eliminateEpsilon(snfa)
33 | for (word <- words) {
34 | logger.debug("...with word " + word)
35 | val events = word.map(c => GenericEvent(c, 0))
36 | val nfa = NFAFactory.buildNFA(p, word.toSet)
37 | val snfaAccepts = snfa.accepts(events)
38 | val elsnfaAccepts = elsnfa.accepts(events)
39 | val nfaAccepts = nfa.accepts(word)
40 | logger.debug("\t\t Result snfa/nfa: " + snfaAccepts + "/" + nfaAccepts)
41 | assert(snfaAccepts == nfaAccepts)
42 | assert(!elsnfa.hasEpsilon)
43 | logger.debug("\t\t Result snfa/elsnfa: " + snfaAccepts + "/" + elsnfaAccepts)
44 | assert(snfaAccepts == elsnfaAccepts)
45 | }
46 | }
47 | }
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/project/Dependencies.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 |
3 | object Dependencies {
4 |
5 | object v {
6 |
7 | final val Logback = "1.2.3"
8 | final val ScalaLogging = "3.9.2"
9 |
10 | final val ScalaTest = "3.0.8"
11 | final val ScalaCheck = "1.14.1"
12 | final val JUnit = "4.13"
13 |
14 | final val Slick = "3.3.0"
15 | final val pSQL = "42.2.9"
16 | final val ScalaCSV = "1.3.6"
17 | final val JTS = "1.16.1"
18 |
19 | final val Breeze = "1.0"
20 | final val Smile = "1.5.3"
21 |
22 | final val Parser = "3.7.1"
23 | final val Config = "1.4.0"
24 | final val LearnLib = "0.15.0"
25 |
26 | final val Json = "2.7.4"
27 | final val Flink = "1.9.0"
28 |
29 | }
30 |
31 | // Logging using slf4j and logback
32 | lazy val Logging: Seq[ModuleID] = Seq(
33 | "ch.qos.logback" % "logback-classic" % v.Logback,
34 | "com.typesafe.scala-logging" %% "scala-logging" % v.ScalaLogging
35 | )
36 |
37 | // ScalaTest and ScalaCheck for UNIT testing
38 | lazy val Testing: Seq[ModuleID] = Seq(
39 | "org.scalatest" %% "scalatest" % v.ScalaTest % "test",
40 | "org.scalacheck" %% "scalacheck" % v.ScalaCheck % "test",
41 | "junit" % "junit" % v.JUnit
42 | )
43 |
44 | lazy val Data: Seq[ModuleID] = Seq(
45 | "com.typesafe.slick" %% "slick" % v.Slick,
46 | "com.typesafe.slick" %% "slick-hikaricp" % v.Slick,
47 | "org.postgresql" % "postgresql" % v.pSQL,
48 | "com.github.tototoshi" %% "scala-csv" % v.ScalaCSV
49 | )
50 |
51 | lazy val Math: Seq[ModuleID] = Seq(
52 | "org.scalanlp" %% "breeze" % v.Breeze,
53 | "org.scalanlp" %% "breeze-natives" % v.Breeze,
54 | "com.github.haifengl" %% "smile-scala" % v.Smile,
55 | "org.locationtech.jts" % "jts-core" % v.JTS
56 | )
57 |
58 | lazy val Tools: Seq[ModuleID] = Seq(
59 | "com.github.scopt" %% "scopt" % v.Parser,
60 | "com.typesafe" % "config" % v.Config,
61 | "de.learnlib" % "learnlib-rpni" % v.LearnLib,
62 | "com.typesafe.play" %% "play-json" % v.Json,
63 | "org.apache.flink" %% "flink-connector-kafka" % v.Flink
64 | )
65 | }
66 |
--------------------------------------------------------------------------------
/cef/src/test/scala/Specs/classical/dfa/DisSpec.scala:
--------------------------------------------------------------------------------
1 | package Specs.classical.dfa
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.DFAInterface
5 | import fsm.classical.fa.dfa.DFAUtils
6 | import org.junit.runner.RunWith
7 | import org.scalatest.FlatSpec
8 | import org.scalatestplus.junit.JUnitRunner
9 | import stream.source.EmitMode
10 | import stream.StreamFactory
11 | import ui.ConfigUtils
12 | import utils.testing.PatternGenerator
13 | import workflow.provider.source.dfa.DFASourceRegExp
14 | import workflow.provider.{DFAProvider, FSMProvider}
15 | import workflow.task.engineTask.ERFTask
16 | import scala.collection.mutable
17 |
18 | @RunWith(classOf[JUnitRunner])
19 | class DisSpec extends FlatSpec with LazyLogging {
20 | "Disambiguated DFAs " should " produce same match dumps as 0-order DFAs " in {
21 | val patterns = PatternGenerator.generateRegExpPatterns(ConfigUtils.noOfPatterns, Set("a", "b", "c"), ConfigUtils.patternMaxDepth)
22 | val ss = StreamFactory.getStreamSource(1000, mutable.Map("a" -> 0.25, "b" -> 0.25, "c" -> 0.25, "d" -> 0.25), 10)
23 | val es = ss.emitEventsAndClose(EmitMode.BUFFER)
24 | val policy = ConfigUtils.defaultPolicy
25 | val maxM = ConfigUtils.maxOrder
26 | for (pat <- patterns) {
27 | logger.debug("\nTesting pattern: " + pat)
28 | val dfaProvider = DFAProvider(DFASourceRegExp(pat, policy, 0, es.getEventTypes))
29 | val fsmp = FSMProvider(dfaProvider)
30 | val erf = ERFTask(fsmp, ss, show = false, reset = false)
31 | val prof0 = erf.execute()
32 | for (m <- 1 to maxM) {
33 | logger.debug("\nTesting pattern: " + pat + " @order " + m)
34 | val dfaProvider = DFAProvider(DFASourceRegExp(pat, policy, m, es.getEventTypes))
35 | val fsmp = FSMProvider(dfaProvider)
36 | val erf = ERFTask(fsmp, ss, show = false, reset = false)
37 | val profm = erf.execute()
38 | assert(DFAUtils.isMUnambiguous(erf.getEngine.getFSMs.head.asInstanceOf[DFAInterface].dfa, m))
39 | assert(prof0.getMatchDump.checkAgainst(profm.getMatchDump))
40 | }
41 | }
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/cef/src/main/scala/fsm/symbolic/sfa/snfa/SNFAStateMutant.scala:
--------------------------------------------------------------------------------
1 | package fsm.symbolic.sfa.snfa
2 |
3 | import fsm.symbolic.sfa.SFATransition
4 |
5 | object SNFAStateMutant {
6 | /**
7 | * Constructor for SNFA State mutant.
8 | *
9 | * @param id The unique id of the state.
10 | * @return The state.
11 | */
12 | def apply(id: Int): SNFAStateMutant = new SNFAStateMutant(id)
13 | }
14 |
15 | /**
16 |  * Class representing the states of fsm.symbolic.sfa.snfa.SNFAMutantGraph, i.e., the states of a SNFA encoded as a
17 | * graph. Each such state should hold direct references to all of its next and previous states.
18 | *
19 | * @param id The unique id of the state.
20 | */
21 | class SNFAStateMutant private[snfa] (val id: Int) extends Serializable {
22 | // The sets of next and previous states are mutable because each state is incrementally constructed by calls to
23 | // fsm.symbolic.sfa.snfa.SNFAStateMutant.addNext and fsm.symbolic.sfa.snfa.SNFAStateMutant.addPrevious.
24 |
25 | // the set of next states
26 | private val next = collection.mutable.Set[(SNFAStateMutant, SFATransition)]()
27 | // the set of previous states
28 | private val previous = collection.mutable.Set[(SNFAStateMutant, SFATransition)]()
29 |
30 | /**
31 | * Adds a next state.
32 | *
33 | * @param new_next The next state to be added with the relevant transition.
34 | */
35 | def addNext(new_next: (SNFAStateMutant, SFATransition)): Unit = next += new_next
36 |
37 | /**
38 | * Adds a previous state.
39 | *
40 | * @param new_previous The previous state to be added with the relevant transition.
41 | */
42 | def addPrevious(new_previous: (SNFAStateMutant, SFATransition)): Unit = previous += new_previous
43 |
44 | /**
45 | * @return All next states along with their transitions.
46 | */
47 | def getNext: collection.mutable.Set[(SNFAStateMutant, SFATransition)] = next
48 |
49 | /**
50 | * @return All previous states along with their transitions.
51 | */
52 | def getPrevious: collection.mutable.Set[(SNFAStateMutant, SFATransition)] = previous
53 |
54 | }
55 |
56 |
--------------------------------------------------------------------------------
/cef/src/main/scala/utils/MiscUtils.scala:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import breeze.stats.distributions.Uniform
4 | import com.typesafe.scalalogging.LazyLogging
5 |
6 | object MiscUtils extends LazyLogging {
7 |
8 | /**
9 | * Randomly shuffles the elements of the list. Creates a new list with the same elements but with another random
10 | * order.
11 | *
12 | * @param initial The initial list.
13 | * @tparam T The type of elements.
14 | * @return A new list with the same elements randomly re-ordered.
15 | */
16 | def shuffleList[T](initial: List[T]): List[T] = shuffleListAux(List.empty[T], initial)
17 |
18 | /**
19 | * Auxiliary recursive function to randomly shuffle a list.
20 | *
21 | * @param shuffled The current shuffled list.
22 | * @param remaining The remaining elements from which to keep picking elements.
23 | * @tparam T The type of elements.
24 | * @return The shuffled list.
25 | */
26 | @scala.annotation.tailrec
27 | private def shuffleListAux[T](
28 | shuffled: List[T],
29 | remaining: List[T]
30 | ): List[T] = {
31 | remaining match {
32 | case Nil => shuffled
33 | case _ :: _ => {
34 | val (sampledElement, remainingElements) = randomPick(remaining)
35 | shuffleListAux(sampledElement :: shuffled, remainingElements)
36 | }
37 | }
38 | }
39 |
40 | /**
41 |    * Randomly picks an element from a list. Returns the element and the rest of the list with the element removed.
42 | *
43 | * @param elements The initial list.
44 | * @tparam T The type of elements.
45 | * @return The randomly chosen element and the list with the element removed.
46 | */
47 | private def randomPick[T](elements: List[T]): (T, List[T]) = {
48 | val uni = Uniform(0, elements.length - 0.001)
49 | val sampleIndex = uni.sample().toInt
50 | val sampledElement = elements(sampleIndex)
51 |     val remainingElements = elements.take(sampleIndex) ++ elements.drop(sampleIndex + 1) // remove only the sampled occurrence; going through a Set would drop duplicate elements
52 | (sampledElement, remainingElements)
53 | }
54 | }
55 |
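// Editorial usage sketch (not part of the file above): a shuffle is a permutation, so the
// multiset of elements is preserved, duplicates included (which the by-index removal in
// randomPick guarantees).
import utils.MiscUtils

object ShuffleSketch extends App {
  val xs = List(1, 1, 2, 3)
  val shuffled = MiscUtils.shuffleList(xs)
  // Same elements (duplicates included), possibly in a different order.
  assert(shuffled.sorted == xs.sorted)
  println(shuffled)
}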
--------------------------------------------------------------------------------
/cef/src/main/scala/workflow/task/fsmTask/DFATask.scala:
--------------------------------------------------------------------------------
1 | package workflow.task.fsmTask
2 |
3 | import com.typesafe.scalalogging.LazyLogging
4 | import fsm.DFAInterface
5 | import fsm.CountPolicy.CountPolicy
6 | import workflow.provider.DFAProvider
7 | import workflow.provider.source.dfa.DFASourceFromXML
8 | import workflow.task.Task
9 |
10 | object DFATask {
11 | /**
12 | * Constructor for DFA task.
13 | *
14 | * @param fn The path to the file.
15 | * @param policy The counting policy (same for all patterns, if there are multiple).
16 | * @param order The disambiguation order (same for all patterns, if there are multiple).
17 | * @param streamSymbols The set of all symbols (same for all patterns, if there are multiple).
18 | * @return DFA task.
19 | */
20 | def apply(
21 | fn: String,
22 | policy: CountPolicy,
23 | order: Int,
24 | streamSymbols: Set[String]
25 | ): DFATask = new DFATask(fn, policy, order, streamSymbols)
26 | }
27 |
28 | /**
29 |  * Task for creating DFA(s) from an XML file.
30 | *
31 | * @param fn The path to the file.
32 | * @param policy The counting policy (same for all patterns, if there are multiple).
33 | * @param order The disambiguation order (same for all patterns, if there are multiple).
34 | * @param streamSymbols The set of all symbols (same for all patterns, if there are multiple).
35 | */
36 | class DFATask private (
37 | fn: String,
38 | policy: CountPolicy,
39 | order: Int,
40 | streamSymbols: Set[String]
41 | ) extends Task with LazyLogging {
42 |
43 | /**
44 | * Executes the task.
45 | *
46 | * @return A list of DFAs (interfaces), one for each pattern in the file.
47 | */
48 | override def execute(): List[DFAInterface] = {
49 | logger.info("Executing DFA task...")
50 | val dfap = DFAProvider(DFASourceFromXML(fn, policy, order, streamSymbols))
51 | val dfas = dfap.provide()
52 | logger.info("\t\t\tDFAs:\n" + dfas.toString)
53 | logger.info("done.")
54 | dfas
55 | }
56 |
57 | }
58 |
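// Editorial usage sketch (not part of the file above). The file path and the counting-policy
// member are assumptions for illustration; check fsm.CountPolicy for the actual members.
import fsm.CountPolicy
import workflow.task.fsmTask.DFATask

object DFATaskSketch extends App {
  val task = DFATask(
    fn            = "patterns/demo/pattern.xml", // hypothetical XML pattern file
    policy        = CountPolicy.OVERLAP,         // assumed member of fsm.CountPolicy
    order         = 1,                           // disambiguation order
    streamSymbols = Set("a", "b", "c")
  )
  val dfas = task.execute() // one DFAInterface per pattern in the file
  println(dfas)
}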
--------------------------------------------------------------------------------
/cef/src/main/scala/model/vmm/Symbol.scala:
--------------------------------------------------------------------------------
1 | package model.vmm
2 |
3 | object Symbol {
4 | /**
5 | * Constructor for symbols.
6 | *
7 | * @param value The symbol's value. Must be non-negative.
8 | * @return The symbol.
9 | */
10 | def apply(value: Int): Symbol = {
11 | require(value >= 0)
12 | new Symbol(value)
13 | }
14 |
15 | /**
16 | * Constructor for the empty symbol. The value of the empty symbol is -1.
17 | *
18 |    * @return The empty symbol.
19 | */
20 | def apply(): Symbol = new Symbol(-1) // for the empty symbol
21 | }
22 |
23 | /**
24 | * Class representing symbols in VMMs. For convenience, each symbol is actually represented by an int.
25 | *
26 |  * @param value The value of the symbol. Symbols with the same value are considered equal.
27 | */
28 | class Symbol private (val value: Int) extends Serializable with Ordered[Symbol] {
29 |
30 | /**
31 | * Determines whether this symbol is equal to another, by comparing their values.
32 | *
33 | * @param other The other symbol.
34 | * @return True if both symbols have the same value. If the other object is not a symbol, returns false.
35 | */
36 | override def equals(other: Any): Boolean = {
37 | other match {
38 | case other: Symbol => other.canEqual(this) && this.value == other.value
39 | case _ => false
40 | }
41 | }
42 |
43 | /**
44 | * @return The value's hash code.
45 | */
46 | override def hashCode(): Int = value.hashCode()
47 |
48 | /**
49 |    * Determines whether the other object can be tested for equality against this symbol.
50 | *
51 | * @param other The other object.
52 | * @return True if the other object is also an instance of Symbol.
53 | */
54 | def canEqual(other: Any): Boolean = other.isInstanceOf[Symbol]
55 |
56 |   /**
57 |    * Compares this symbol to another by comparing their values.
58 |    *
59 |    * @param other The other symbol.
60 |    * @return A negative, zero or positive int when this value is less than, equal to or greater than the other's.
61 |    */
62 |   override def compare(other: Symbol): Int = java.lang.Integer.compare(this.value, other.value) // overflow-safe
63 |
64 | override def toString: String = value.toString
65 | }
66 |
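// Editorial usage sketch (not part of the file above): symbols are ordered and compared by
// their int values, so collections of symbols can be sorted directly via the Ordered trait.
import model.vmm.Symbol

object SymbolSketch extends App {
  val symbols = List(Symbol(2), Symbol(0), Symbol(1))
  println(symbols.sorted)          // List(0, 1, 2), via compare
  println(Symbol(1) == Symbol(1))  // true: equality is by value
  println(Symbol())                // -1, the empty symbol
}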
--------------------------------------------------------------------------------