├── cmd └── anom │ └── main.go ├── algorithm.go ├── glide.yaml ├── iterator_test.go ├── stl_test.go ├── weighted_sum_test.go ├── semaphore.go ├── derivative_test.go ├── ema_test.go ├── .circleci └── config.yml ├── bitmap_binary_test.go ├── absolute_threshold_test.go ├── normal_distribution_test.go ├── anomaly.go ├── score_list_test.go ├── iterator.go ├── score_list.go ├── pearson_correlation_test.go ├── go.mod ├── normal_distribution.go ├── absolute_threshold.go ├── detector_test.go ├── bitmap_test.go ├── bitmap_binary.go ├── spearman_correlation_test.go ├── cross_correlation_test.go ├── pearson_correlation.go ├── testdata ├── airline-passengers.csv └── co2.csv ├── glide.lock ├── weighted_sum.go ├── ema.go ├── derivative.go ├── math_test.go ├── detector.go ├── .gitignore ├── correlator_test.go ├── correlator.go ├── stl.go ├── spearman_correlation.go ├── helpers.go ├── math.go ├── README.md ├── time_series_test.go ├── go.sum ├── cross_correlation.go ├── bitmap.go ├── time_series.go └── LICENSE /cmd/anom/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | -------------------------------------------------------------------------------- /algorithm.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | // Algorithm is the base interface of all algorithms 4 | type Algorithm interface { 5 | Run(*TimeSeries) *ScoreList 6 | computeScores(*TimeSeries) (*ScoreList, error) 7 | } 8 | 9 | // TimePeriod represents a time period marked by start and end timestamps. 10 | type TimePeriod struct { 11 | Start float64 12 | End float64 13 | } 14 | -------------------------------------------------------------------------------- /glide.yaml: -------------------------------------------------------------------------------- 1 | package: . 2 | import: 3 | - package: github.com/project-anomalia/stl 4 | version: v1.3.1 5 | - package: github.com/google/flatbuffers 6 | version: 1.11.0 7 | subpackages: 8 | - go 9 | - package: github.com/golang/protobuf 10 | version: v1.3.1 11 | subpackages: 12 | - proto 13 | - package: github.com/gogo/protobuf 14 | version: v1.2.1 15 | subpackages: 16 | - proto 17 | -------------------------------------------------------------------------------- /iterator_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestIterator(t *testing.T) { 6 | data := []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} 7 | it := NewIterator(data) 8 | pos := 0 9 | for { 10 | if valuePtr := it.Next(); valuePtr != nil { 11 | if data[pos] != *valuePtr { 12 | t.Fatal("iterator value mismatch") 13 | } 14 | pos++ 15 | continue 16 | } 17 | break 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /stl_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunWithSTL(t *testing.T) { 6 | ts := NewTimeSeriesFromCSV("testdata/co2.csv") 7 | scoreList := NewSTL().Width(35).Periodicity(12).Run(ts) 8 | 9 | if scoreList == nil { 10 | t.Fatalf("score list cannot be nil") 11 | } 12 | 13 | if len(scoreList.Scores) != ts.Size() { 14 | t.Fatalf("score list must have the same dimension as original time series") 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /weighted_sum_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunWithWeightedSum(t *testing.T) { 6 | timeSeries := &TimeSeries{ 7 | Timestamps: []float64{1, 1, 3, 4, 5, 6, 7, 8, 9, 10}, 8 | Values: []float64{56, 59, 52, 49, 49, 1.5, 48, 50, 53, 44}, 9 | } 10 | 11 | scoreList := NewWeightedSum().ScoreWeight(0.5).MinEmaScore(1.0).Run(timeSeries) 12 | 13 | if len(scoreList.Timestamps) != len(timeSeries.Timestamps) { 14 | t.Fatalf("score list and time series dimensions do not match") 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /semaphore.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | type empty struct{} 4 | type semaphore chan empty 5 | 6 | func (s semaphore) Lock() { 7 | s.acquire(1) 8 | } 9 | 10 | func (s semaphore) Unlock() { 11 | s.release(1) 12 | } 13 | 14 | func (s semaphore) Wait(n int) { 15 | s.acquire(n) 16 | } 17 | 18 | func (s semaphore) Signal() { 19 | s.release(1) 20 | } 21 | 22 | func (s semaphore) acquire(n int) { 23 | e := empty{} 24 | for i := 0; i < n; i++ { 25 | s <- e 26 | } 27 | } 28 | 29 | func (s semaphore) release(n int) { 30 | for i := 0; i < n; i++ { 31 | <-s 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /derivative_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunWithDerivative(t *testing.T) { 6 | timeSeries := &TimeSeries{ 7 | Timestamps: []float64{1, 1, 3, 4, 5, 6, 7, 8, 9, 10}, 8 | Values: []float64{56, 59, 52, 49, 49, 1.5, 48, 50, 53, 44}, 9 | } 10 | 11 | scoreList := NewDerivative().SmoothingFactor(0.3).Run(timeSeries) 12 | if scoreList == nil { 13 | t.Fatalf("score list cannot be nil") 14 | } 15 | 16 | if len(scoreList.Scores) != len(timeSeries.Values) { 17 | t.Fatalf("score list and time series dimensions do not match") 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /ema_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunWithEma(t *testing.T) { 6 | timeSeries := &TimeSeries{ 7 | Timestamps: []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 8 | Values: []float64{56, 59, 52, 49, 49, 1.5, 48, 50, 53, 44}, 9 | } 10 | 11 | scoreList := NewEma().LagWindowSize(3).SmoothingFactor(0.1).Run(timeSeries) 12 | 13 | if scoreList == nil { 14 | t.Fatalf("score list cannot be nil") 15 | } 16 | 17 | if len(scoreList.Scores) != len(timeSeries.Values) { 18 | t.Fatalf("score list and time series dimensions do not match") 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | workflows: 4 | version: 2 5 | test: 6 | jobs: 7 | - test-1.10 8 | - test-1.12 9 | 10 | jobs: 11 | test-1.10: 12 | docker: 13 | - image: circleci/golang:1.10 14 | working_directory: /go/src/github.com/project-anomalia/anomalia 15 | steps: 16 | - checkout 17 | - run: go get -v -t -d ./... 18 | - run: go test -v -race 19 | 20 | test-1.12: 21 | docker: 22 | - image: circleci/golang:1.12 23 | working_directory: ~/anomalia 24 | steps: 25 | - checkout 26 | - run: go test -v -race 27 | -------------------------------------------------------------------------------- /bitmap_binary_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestBitmapBinarySlice(t *testing.T) { 6 | var str BitmapBinary = "hello" 7 | slice := str.Slice(-1, 10) 8 | if slice != "h" { 9 | t.Fatalf("must return an empty string") 10 | } 11 | } 12 | 13 | func TestBitmapBinaryAtIndex(t *testing.T) { 14 | var str BitmapBinary = "binary" 15 | s := str.At(0) 16 | if s != "b" { 17 | t.Fatalf("must return the first character of the binary") 18 | } 19 | 20 | s = str.At(10) 21 | if s != "" { 22 | t.Fatalf("must return an empty string when index over binary length") 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /absolute_threshold_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunWithAbsoluteThreshold(t *testing.T) { 6 | timeSeries := &TimeSeries{ 7 | Timestamps: []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 8 | Values: []float64{0.4, 5.0, 5.0, 9.0, 1.0, 4.5, 3.0, 6.0, 1.4, 5.3}, 9 | } 10 | scoreList := NewAbsoluteThreshold().Thresholds(0.5, 2.5).Run(timeSeries) 11 | if scoreList == nil { 12 | t.Fatalf("score list cannot be nil") 13 | } 14 | 15 | if len(scoreList.Scores) != len(timeSeries.Values) { 16 | t.Fatalf("score list and time series dimensions do not match") 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /normal_distribution_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunWithNormalDistribution(t *testing.T) { 6 | timeSeries := &TimeSeries{ 7 | Timestamps: []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 8 | Values: []float64{56, 59, 52, 49, 49, 1.5, 48, 50, 53, 44}, 9 | } 10 | 11 | scoreList := NewNormalDistribution().Run(timeSeries) 12 | if scoreList == nil { 13 | t.Fatalf("score list cannot be nil") 14 | } 15 | 16 | anomalies := filter(scoreList.Scores, func(val float64) bool { return val != 0.0 }) 17 | if len(anomalies) != 1 { 18 | t.Fatalf("only 1 anomaly in the data set") 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /anomaly.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | // Anomaly holds information about the detected anomaly/outlier 4 | type Anomaly struct { 5 | Timestamp float64 6 | StartTimestamp float64 7 | EndTimestamp float64 8 | Score float64 9 | Value float64 10 | Severity string 11 | threshold float64 12 | } 13 | 14 | // GetTimeWindow returns anomaly start and end timestamps 15 | func (anomaly *Anomaly) GetTimeWindow() (float64, float64) { 16 | return anomaly.StartTimestamp, anomaly.EndTimestamp 17 | } 18 | 19 | // GetTimestampedScore returns anomaly exact timestamp with calculated score 20 | func (anomaly *Anomaly) GetTimestampedScore() (float64, float64) { 21 | return anomaly.Timestamp, anomaly.Score 22 | } 23 | -------------------------------------------------------------------------------- /score_list_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestDenoiseScoreList(t *testing.T) { 6 | denoised := fakeScoreList().Denoise() 7 | if denoised == nil { 8 | t.Fatalf("score list cannot be nil") 9 | } 10 | 11 | noisyScore := denoised.Scores[0] 12 | if noisyScore != 0.0 { 13 | t.Fatalf("noisy scores should be zeroed") 14 | } 15 | } 16 | 17 | func TestMaxOfScoreList(t *testing.T) { 18 | maxScore := fakeScoreList().Max() 19 | if maxScore != 4.6 { 20 | t.Fatalf("max score is incorrect") 21 | } 22 | } 23 | 24 | func fakeScoreList() *ScoreList { 25 | return &ScoreList{ 26 | Timestamps: []float64{1, 2, 3, 4, 5, 6}, 27 | Scores: []float64{0.0010, 4.6, 4.6, 4.6, 1.0, 1.0}, 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /iterator.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "sync" 4 | 5 | // Iterator wraps a slice of float64 values with the current element position 6 | type Iterator struct { 7 | data []float64 8 | current int 9 | mu sync.Mutex 10 | } 11 | 12 | // NewIterator returns an iterator instance 13 | func NewIterator(data []float64) *Iterator { 14 | return &Iterator{data: data, current: -1} 15 | } 16 | 17 | // Next returns next item from the iterator 18 | // It panics when iterator is exhausted. 19 | func (it *Iterator) Next() *float64 { 20 | it.mu.Lock() 21 | defer it.mu.Unlock() 22 | it.current++ 23 | 24 | if it.current >= len(it.data) { 25 | return nil 26 | } 27 | return it.value() 28 | } 29 | 30 | // Value return current value of the iterator 31 | func (it *Iterator) value() *float64 { 32 | return &it.data[it.current] 33 | } 34 | -------------------------------------------------------------------------------- /score_list.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | const noisePercentageThreshold = 0.001 4 | 5 | // ScoreList holds timestamps and their scores 6 | type ScoreList struct { 7 | Timestamps []float64 8 | Scores []float64 9 | } 10 | 11 | // Denoise sets low(noisy) scores to 0.0 12 | func (sl *ScoreList) Denoise() *ScoreList { 13 | threshold := noisePercentageThreshold * sl.Max() 14 | 15 | denoised := mapSlice(sl.Scores, func(score float64) float64 { 16 | if score < threshold { 17 | return 0.0 18 | } 19 | return score 20 | }) 21 | return &ScoreList{sl.Timestamps, denoised} 22 | } 23 | 24 | // Max returns the maximum of the scores 25 | func (sl *ScoreList) Max() float64 { 26 | _, max := minMax(sl.Scores) 27 | return max 28 | } 29 | 30 | // Zip convert the score list to map (map[Timestamp]Score) 31 | func (sl *ScoreList) Zip() map[float64]float64 { 32 | m := make(map[float64]float64) 33 | sorted := sortedCopy(sl.Timestamps) 34 | 35 | for idx, timestamp := range sorted { 36 | m[timestamp] = sl.Scores[idx] 37 | } 38 | return m 39 | } 40 | -------------------------------------------------------------------------------- /pearson_correlation_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "math" 5 | "testing" 6 | ) 7 | 8 | func TestRunPearsonCorrelationWhenTimeSeriesExactlyTheSame(t *testing.T) { 9 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 10 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 11 | 12 | coefficient := NewPearsonCorrelation(timeSeriesA, timeSeriesB).Run() 13 | if coefficient != 1 { 14 | t.Fatalf("must return exactly 1") 15 | } 16 | } 17 | 18 | func TestRunPearsonCorrelationWhenTimeSeriesHaveNoLinearRelation(t *testing.T) { 19 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{0, 3.2, 5.5, 7.1, 8.9, 9, 10.1, 10.5}) 20 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{-0.5, 1, 2.5, 4.1, 4.6, -1, 1, -1}) 21 | 22 | coefficient := NewPearsonCorrelation(timeSeriesA, timeSeriesB).Run() 23 | if math.Round(coefficient) != 0 { 24 | t.Fatalf("must return number close to 0") 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/project-anomalia/anomalia 2 | 3 | require ( 4 | github.com/chewxy/hm v1.0.0 // indirect 5 | github.com/chewxy/math32 v1.0.0 // indirect 6 | github.com/chewxy/stl v1.3.1 // indirect 7 | github.com/gogo/protobuf v1.2.1 // indirect 8 | github.com/golang/protobuf v1.3.1 // indirect 9 | github.com/google/flatbuffers v1.11.0 // indirect 10 | github.com/kisielk/errcheck v1.2.0 // indirect 11 | github.com/pkg/errors v0.8.1 // indirect 12 | github.com/project-anomalia/stl v1.3.1 13 | github.com/xtgo/set v1.0.0 // indirect 14 | golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4 // indirect 15 | golang.org/x/net v0.0.0-20190628185345-da137c7871d7 // indirect 16 | golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb // indirect 17 | golang.org/x/text v0.3.2 // indirect 18 | golang.org/x/tools v0.0.0-20190703172252-a00916dd39a5 // indirect 19 | gonum.org/v1/gonum v0.0.0-20190628223043-536a303fd62f // indirect 20 | gorgonia.org/tensor v0.8.1 // indirect 21 | gorgonia.org/vecf32 v0.7.0 // indirect 22 | gorgonia.org/vecf64 v0.7.0 // indirect 23 | ) 24 | -------------------------------------------------------------------------------- /normal_distribution.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | const defaultEpsilonThreshold = 0.0025 4 | 5 | // NormalDistribution holds the normal distribution algorithm configuration. 6 | type NormalDistribution struct { 7 | epsilonThreshold float64 8 | } 9 | 10 | // NewNormalDistribution returns normal distribution instance. 11 | func NewNormalDistribution() *NormalDistribution { 12 | return &NormalDistribution{defaultEpsilonThreshold} 13 | } 14 | 15 | // EpsilonThreshold sets the Gaussian epsilon threshold. 16 | func (nd *NormalDistribution) EpsilonThreshold(threshold float64) *NormalDistribution { 17 | nd.epsilonThreshold = threshold 18 | return nd 19 | } 20 | 21 | // Run runs the normal distribution algorithm over the time series. 22 | func (nd *NormalDistribution) Run(timeSeries *TimeSeries) *ScoreList { 23 | scoreList, _ := nd.computeScores(timeSeries) 24 | return scoreList 25 | } 26 | 27 | func (nd *NormalDistribution) computeScores(timeSeries *TimeSeries) (*ScoreList, error) { 28 | mean := timeSeries.Average() 29 | std := timeSeries.Stdev() 30 | 31 | scores := mapSlice(timeSeries.Values, func(value float64) float64 { 32 | score := Pdf(mean, std)(value) 33 | if score < nd.epsilonThreshold { 34 | return score 35 | } 36 | return 0.0 37 | }) 38 | 39 | scoreList := &ScoreList{timeSeries.Timestamps, scores} 40 | return scoreList, nil 41 | } 42 | -------------------------------------------------------------------------------- /absolute_threshold.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | // AbsoluteThreshold holds absolute threshold algorithm configuration. 4 | // It takes the difference of lower and upper thresholds with the current value as anomaly score. 5 | type AbsoluteThreshold struct { 6 | lowerThreshold float64 7 | upperThreshold float64 8 | } 9 | 10 | // NewAbsoluteThreshold returns AbsoluteThAbsoluteThreshold instance. 11 | func NewAbsoluteThreshold() *AbsoluteThreshold { 12 | return &AbsoluteThreshold{} 13 | } 14 | 15 | // Thresholds sets both lower and upper thresholds. 16 | func (at *AbsoluteThreshold) Thresholds(lower, upper float64) Algorithm { 17 | at.lowerThreshold = lower 18 | at.upperThreshold = upper 19 | return at 20 | } 21 | 22 | // Run runs the absolute threshold algorithm over the time series. 23 | func (at *AbsoluteThreshold) Run(timeSeries *TimeSeries) *ScoreList { 24 | scoreList, _ := at.computeScores(timeSeries) 25 | return scoreList 26 | } 27 | 28 | func (at *AbsoluteThreshold) computeScores(timeSeries *TimeSeries) (*ScoreList, error) { 29 | scores := mapSlice(timeSeries.Values, func(value float64) float64 { 30 | if value > at.upperThreshold { 31 | return value - at.upperThreshold 32 | } else if value < at.lowerThreshold { 33 | return at.lowerThreshold - value 34 | } else { 35 | return 0.0 36 | } 37 | }) 38 | scoreList := &ScoreList{timeSeries.Timestamps, scores} 39 | return scoreList, nil 40 | } 41 | -------------------------------------------------------------------------------- /detector_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunDefaultDetectorOnSmallDataset(t *testing.T) { 6 | timeSeries := generateFakeTimeSeries(100) 7 | scoreList := NewDetector(timeSeries).GetScores() 8 | if scoreList == nil { 9 | t.Fatalf("score list cannot be nil") 10 | } 11 | } 12 | 13 | func TestRunDefaultDetectorOnLargeDataset(t *testing.T) { 14 | timeSeries := generateFakeTimeSeries(3000) 15 | scoreList := NewDetector(timeSeries).Threshold(4.5).GetScores() 16 | if scoreList == nil { 17 | t.Fatalf("score list cannot be nil") 18 | } 19 | } 20 | 21 | func TestGetAnomaliesUsingDefaultDetector(t *testing.T) { 22 | timeSeries := generateFakeTimeSeries(2000) 23 | detector := NewDetector(timeSeries).Threshold(3.0) 24 | 25 | scores := detector.GetScores() 26 | anomalies := detector.GetAnomalies(scores) 27 | if len(anomalies) != 1 { 28 | t.Fatalf("should be a least one anomaly") 29 | } 30 | } 31 | 32 | func TestGetAnomaliesInTestData(t *testing.T) { 33 | ts := NewTimeSeriesFromCSV("testdata/airline-passengers.csv") 34 | detector := NewDetector(ts).Threshold(1.3) 35 | 36 | scoreList := NewSTL().Width(15).Periodicity(12).MethodType(Multiplicative).Run(ts).Denoise() 37 | if scoreList == nil { 38 | t.Fatalf("score list cannot be nil") 39 | } 40 | 41 | anomalies := detector.GetAnomalies(scoreList) 42 | if len(anomalies) != 2 { 43 | t.Fatalf("there are exactly 2 anomalies") 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /bitmap_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "math/rand" 5 | "sync" 6 | "testing" 7 | "time" 8 | ) 9 | 10 | var ( 11 | mu sync.Mutex 12 | generator = rand.New(rand.NewSource(time.Now().UnixNano())) 13 | ) 14 | 15 | func TestRunWithBitmap(t *testing.T) { 16 | timeSeries := generateFakeTimeSeries(2000) 17 | scoreList := NewBitmap().ChunkSize(3).Precision(5).Run(timeSeries) 18 | if scoreList == nil { 19 | t.Fatalf("score list cannot be nil") 20 | } 21 | 22 | if len(scoreList.Scores) != len(timeSeries.Timestamps) { 23 | t.Fatalf("both time series and score list dimensions do not match") 24 | } 25 | } 26 | 27 | func TestRunBitmapWhenNotEnoughDataPoints(t *testing.T) { 28 | timeSeries := &TimeSeries{ 29 | Timestamps: []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 30 | Values: []float64{1, 5, 52, 49, 49, 1.5, 48, 50, 53, 44}, 31 | } 32 | scoreList := NewBitmap().Run(timeSeries) 33 | if scoreList != nil { 34 | t.Fatalf("score list must be nil (not enough data points)") 35 | } 36 | } 37 | 38 | func generateFakeTimeSeries(datasetSize int) *TimeSeries { 39 | timestamps := make([]float64, datasetSize) 40 | for i := 0; i < datasetSize; i++ { 41 | timestamps[i] = float64(i) + 1 42 | } 43 | mu.Lock() 44 | values := make([]float64, datasetSize) 45 | for i := 0; i < datasetSize; i++ { 46 | values[i] = RandomSineValue(generator, datasetSize) 47 | } 48 | defer mu.Unlock() 49 | return &TimeSeries{timestamps, values} 50 | } 51 | -------------------------------------------------------------------------------- /bitmap_binary.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "unicode/utf8" 4 | 5 | // BitmapBinary wrapper type around a string with custom behaviour 6 | type BitmapBinary string 7 | 8 | // Slice slices a string in a Python-ish way 9 | // When lower == upper, it returns an empty string 10 | // When lower < 0 and upper < len(binary), it return an empty string 11 | // When lower < 0 and upper >= len(binary), it returns the first character 12 | // When lower >= 0 and upper >= len(binary), it slices the string from lower till end of string 13 | func (bb BitmapBinary) Slice(lower, upper int) BitmapBinary { 14 | var result string 15 | switch { 16 | case lower == upper: 17 | case (lower < 0) && (upper < len(bb)): 18 | result = "" 19 | case (lower < 0) && (upper >= len(bb)): 20 | result = bb.String()[0:1] 21 | case (lower >= 0) && (upper >= len(bb)): 22 | result = bb.String()[lower:] 23 | default: 24 | result = bb.String()[lower:upper] 25 | } 26 | return BitmapBinary(result) 27 | } 28 | 29 | // At returns character string at the specified index 30 | func (bb BitmapBinary) At(index int) BitmapBinary { 31 | if index >= len(bb) { 32 | return "" 33 | } 34 | return BitmapBinary(bb.String()[index]) 35 | } 36 | 37 | // String returns the underlying string 38 | func (bb BitmapBinary) String() string { 39 | return string(bb) 40 | } 41 | 42 | // Len returns the length of the underlying string 43 | func (bb BitmapBinary) Len() int { 44 | return utf8.RuneCountInString(bb.String()) 45 | } 46 | -------------------------------------------------------------------------------- /spearman_correlation_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "math" 5 | "testing" 6 | ) 7 | 8 | func TestRunSpearmanCorrelationWhenTimeSeriesExactlyTheSame(t *testing.T) { 9 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 10 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 11 | 12 | coefficient := NewSpearmanCorrelation(timeSeriesA, timeSeriesB).Run() 13 | if coefficient != 1 { 14 | t.Fatalf("must return exactly 1") 15 | } 16 | } 17 | 18 | func TestRunSpearmanCorrelationWhenTimeSeriesHaveNoLinearRelation(t *testing.T) { 19 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{0, 3.2, 5.5, 7.1, 8.9, 9, 10.1, 10.5}) 20 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{-0.5, 1, 2.5, 4.1, 4.6, -1, 1, -1}) 21 | 22 | coefficient := NewSpearmanCorrelation(timeSeriesA, timeSeriesB).Run() 23 | if math.Round(coefficient) != 0 { 24 | t.Fatalf("must return number close to 0") 25 | } 26 | } 27 | 28 | func TestRunSpearmanCorrelationExample(t *testing.T) { 29 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7, 8}, []float64{35, 23, 47, 17, 10, 43, 9, 6, 28}) 30 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7, 8}, []float64{30, 33, 45, 23, 8, 49, 12, 4, 31}) 31 | 32 | coefficient := NewSpearmanCorrelation(timeSeriesA, timeSeriesB).Run() 33 | if coefficient != 0.9 { 34 | t.Fatalf("incorrect rank correlation coefficient") 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /cross_correlation_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestNewCrossCorrelation(t *testing.T) { 6 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 7 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{2, 3, -2, 3, 2, 4, 1, -1}) 8 | correlator := NewCrossCorrelation(timeSeriesA, timeSeriesB).MaxShift(30).Impact(0.01) 9 | if correlator == nil { 10 | t.Fatalf("failed to initialize correlator") 11 | } 12 | } 13 | 14 | func TestRunCrossCorrelation(t *testing.T) { 15 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7, 8}, []float64{0, 0, 0, 0, 0.5, 1, 1, 1, 0}) 16 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7, 8}, []float64{0, 0.5, 1, 1, 1, 0, 0, 0, 0}) 17 | timeSeriesC := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5}, []float64{0, 0.5, 1, 1, 1, 0}) 18 | result1 := NewCrossCorrelation(timeSeriesA, timeSeriesB).GetCorrelationResult() 19 | result2 := NewCrossCorrelation(timeSeriesA, timeSeriesC).GetCorrelationResult() 20 | 21 | if result1.Coefficient != result2.Coefficient { 22 | t.Fatalf("correlation coefficient did not match") 23 | } 24 | 25 | if result1.Shift != result2.Shift { 26 | t.Fatalf("correlation shift did not match") 27 | } 28 | } 29 | 30 | func TestCorrelationWhenTimeSeriesExactlyTheSame(t *testing.T) { 31 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 32 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 33 | result := NewCrossCorrelation(timeSeriesA, timeSeriesB).Run() 34 | if result != 1 { 35 | t.Fatalf("incorrect coefficient: time series are exactly the same") 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /pearson_correlation.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "errors" 5 | "math" 6 | ) 7 | 8 | // PearsonCorrelation struct which holds the current and target time series. 9 | type PearsonCorrelation struct { 10 | current, target *TimeSeries 11 | } 12 | 13 | // NewPearsonCorrelation returns an instance of the pearson correlation struct. 14 | // It measures the linear correlation between the current and target time series. 15 | // It should be used when the two time series are normally distributed. 16 | // 17 | // The correlation coefficient always has a value between -1 and +1 where: 18 | // - +1 is total positive linear correlation 19 | // - 0 is no linear correlation 20 | // - −1 is total negative linear correlation 21 | // 22 | // For the used formula, check: https://en.wikipedia.org/wiki/Pearson_correlation_coefficient 23 | func NewPearsonCorrelation(current, target *TimeSeries) *PearsonCorrelation { 24 | return &PearsonCorrelation{current, target} 25 | } 26 | 27 | // Run runs the pearson correlation on the current and target time series. 28 | // It returns the correlation coefficient which always has a value between -1 and +1. 29 | func (pc *PearsonCorrelation) Run() float64 { 30 | currentSquares, targetSquares := sumOfSquares(pc.current.Values), sumOfSquares(pc.target.Values) 31 | currentAvg, targetAvg := Average(pc.current.Values), Average(pc.target.Values) 32 | n := float64(pc.current.Size()) 33 | denom := math.Sqrt((currentSquares - n*currentAvg*currentAvg) * (targetSquares - n*targetAvg*targetAvg)) 34 | 35 | if denom == 0 { 36 | return denom 37 | } 38 | return (sumOfProducts(pc.current.Values, pc.target.Values) - n*currentAvg*targetAvg) / denom 39 | } 40 | 41 | func (pc *PearsonCorrelation) sanityCheck() error { 42 | if pc.current.Size() != pc.target.Size() { 43 | return errors.New("current and target series do not have the same dimension") 44 | } 45 | return nil 46 | } 47 | -------------------------------------------------------------------------------- /testdata/airline-passengers.csv: -------------------------------------------------------------------------------- 1 | Date,Passengers 2 | 0,112 3 | 1,118 4 | 2,132 5 | 3,129 6 | 4,121 7 | 5,135 8 | 6,148 9 | 7,148 10 | 8,136 11 | 9,119 12 | 10,104 13 | 11,118 14 | 12,115 15 | 13,126 16 | 14,141 17 | 15,135 18 | 16,125 19 | 17,149 20 | 18,170 21 | 19,170 22 | 20,158 23 | 21,133 24 | 22,114 25 | 23,140 26 | 24,145 27 | 25,150 28 | 26,178 29 | 27,163 30 | 28,172 31 | 29,178 32 | 30,199 33 | 31,199 34 | 32,184 35 | 33,162 36 | 34,146 37 | 35,166 38 | 36,171 39 | 37,180 40 | 38,193 41 | 39,181 42 | 40,183 43 | 41,218 44 | 42,230 45 | 43,242 46 | 44,209 47 | 45,191 48 | 46,172 49 | 47,194 50 | 48,196 51 | 49,196 52 | 50,236 53 | 51,235 54 | 52,229 55 | 53,243 56 | 54,264 57 | 55,272 58 | 56,237 59 | 57,211 60 | 58,180 61 | 59,201 62 | 60,204 63 | 61,188 64 | 62,235 65 | 63,227 66 | 64,234 67 | 65,264 68 | 66,302 69 | 67,293 70 | 68,259 71 | 69,229 72 | 70,203 73 | 71,229 74 | 72,242 75 | 73,233 76 | 74,267 77 | 75,269 78 | 76,270 79 | 77,315 80 | 78,364 81 | 79,347 82 | 80,312 83 | 81,274 84 | 82,237 85 | 83,278 86 | 84,284 87 | 85,277 88 | 86,317 89 | 87,313 90 | 88,318 91 | 89,374 92 | 90,413 93 | 91,405 94 | 92,355 95 | 93,306 96 | 94,271 97 | 95,306 98 | 96,315 99 | 97,301 100 | 98,356 101 | 99,348 102 | 100,355 103 | 101,422 104 | 102,465 105 | 103,467 106 | 104,404 107 | 105,347 108 | 106,305 109 | 107,336 110 | 108,340 111 | 109,318 112 | 110,362 113 | 111,348 114 | 112,363 115 | 113,435 116 | 114,491 117 | 115,505 118 | 116,404 119 | 117,359 120 | 118,310 121 | 119,337 122 | 120,360 123 | 121,342 124 | 122,406 125 | 123,396 126 | 124,420 127 | 125,472 128 | 126,548 129 | 127,559 130 | 128,463 131 | 129,407 132 | 130,362 133 | 131,405 134 | 132,417 135 | 133,391 136 | 134,419 137 | 135,461 138 | 136,472 139 | 137,535 140 | 138,622 141 | 139,606 142 | 140,508 143 | 141,461 144 | 142,390 145 | 143,432 146 | -------------------------------------------------------------------------------- /glide.lock: -------------------------------------------------------------------------------- 1 | hash: 380d9f043c34878885e8be812aeb2bc370ad79e268a3e3bd0918239edc979062 2 | updated: 2019-07-03T19:52:10.145435+02:00 3 | imports: 4 | - name: github.com/chewxy/hm 5 | version: 61efb3290a086d1335e8954b3734c102126818ba 6 | - name: github.com/chewxy/math32 7 | version: 9a000fcb79dff2019bd78fc28bd676198ff3a616 8 | - name: github.com/chewxy/stl 9 | version: ed9071568c44065f500b88c4eae1bb3694a1f0c3 10 | subpackages: 11 | - loess 12 | - name: github.com/gogo/protobuf 13 | version: ba06b47c162d49f2af050fb4c75bcbc86a159d5c 14 | subpackages: 15 | - gogoproto 16 | - proto 17 | - protoc-gen-gogo/descriptor 18 | - name: github.com/golang/protobuf 19 | version: b5d812f8a3706043e23a9cd5babf2e5423744d30 20 | subpackages: 21 | - proto 22 | - name: github.com/google/flatbuffers 23 | version: bf9eb67ab9371755c6bcece13cadc7693bcbf264 24 | subpackages: 25 | - go 26 | - name: github.com/pkg/errors 27 | version: 27936f6d90f9c8e1145f11ed52ffffbfdb9e0af7 28 | - name: github.com/project-anomalia/stl 29 | version: ed9071568c44065f500b88c4eae1bb3694a1f0c3 30 | - name: github.com/xtgo/set 31 | version: 708d80f4a27458f99f8ca12bd0e638c6ee65627f 32 | - name: gonum.org/v1/gonum 33 | version: 536a303fd62fe10303f5c75bf644af9f54e90c25 34 | subpackages: 35 | - blas 36 | - blas/blas64 37 | - blas/cblas128 38 | - blas/gonum 39 | - floats 40 | - internal/asm/c128 41 | - internal/asm/c64 42 | - internal/asm/f32 43 | - internal/asm/f64 44 | - internal/cmplx64 45 | - internal/math32 46 | - lapack 47 | - lapack/gonum 48 | - lapack/lapack64 49 | - mat 50 | - name: gorgonia.org/tensor 51 | version: 663333fb2053f3ceaf5286ede78b8fe80f7ce0af 52 | subpackages: 53 | - internal/execution 54 | - internal/serialization/fb 55 | - internal/serialization/pb 56 | - internal/storage 57 | - native 58 | - name: gorgonia.org/vecf32 59 | version: a06d791a4e66be4d2fcae4530bb67afa232a0dae 60 | - name: gorgonia.org/vecf64 61 | version: d21373a26cc12a37eaab0182a21d0225c3b7ef34 62 | testImports: [] 63 | -------------------------------------------------------------------------------- /weighted_sum.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "math" 4 | 5 | // WeightedSum holds the weighted sum algorithm configuration. 6 | // 7 | // The weighted sum algorithm uses a weighted sum to calculate anomalies scores. 8 | // It should be used ONLY on small data-sets. 9 | type WeightedSum struct { 10 | scoreWeight float64 11 | minEmaScore float64 12 | *ExponentialMovingAverage 13 | *Derivative 14 | } 15 | 16 | // NewWeightedSum returns weighted sum instance 17 | func NewWeightedSum() *WeightedSum { 18 | return &WeightedSum{ 19 | scoreWeight: 0.65, 20 | minEmaScore: 0.94, 21 | ExponentialMovingAverage: &ExponentialMovingAverage{2, 0.2}, 22 | Derivative: &Derivative{0.2}, 23 | } 24 | } 25 | 26 | // ScoreWeight sets Ema's score weight. 27 | func (ws *WeightedSum) ScoreWeight(weight float64) *WeightedSum { 28 | ws.scoreWeight = weight 29 | return ws 30 | } 31 | 32 | // MinEmaScore sets the minimal Ema score above which the weighted score is used. 33 | func (ws *WeightedSum) MinEmaScore(value float64) *WeightedSum { 34 | ws.minEmaScore = value 35 | return ws 36 | } 37 | 38 | // Run runs the weighted sum algorithm over the time series 39 | func (ws *WeightedSum) Run(timeSeries *TimeSeries) *ScoreList { 40 | scoreList, _ := ws.computeScores(timeSeries) 41 | return scoreList 42 | } 43 | 44 | func (ws *WeightedSum) computeScores(timeSeries *TimeSeries) (*ScoreList, error) { 45 | emaScores := ws.ExponentialMovingAverage.Run(timeSeries).Zip() 46 | derivativeScores := ws.Derivative.Run(timeSeries).Zip() 47 | 48 | scores := mapSlice(timeSeries.Timestamps, func(timestamp float64) float64 { 49 | weightedScore := emaScores[timestamp]*ws.scoreWeight + derivativeScores[timestamp]*(1-ws.scoreWeight) 50 | score := math.Max(emaScores[timestamp], weightedScore) 51 | 52 | if emaScores[timestamp] > ws.minEmaScore { 53 | return math.Max(score, derivativeScores[timestamp]) 54 | } 55 | return score 56 | }) 57 | 58 | scoreList := (&ScoreList{timeSeries.Timestamps, scores}).Denoise() 59 | return scoreList, nil 60 | } 61 | -------------------------------------------------------------------------------- /ema.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "math" 4 | 5 | // ExponentialMovingAverage holds the algorithm configuration. 6 | // It uses the value's deviation from the exponential moving average 7 | // of a lagging window to determine anomalies scores. 8 | type ExponentialMovingAverage struct { 9 | lagWindowSize int 10 | smoothingFactor float64 11 | } 12 | 13 | // NewEma returns ExponentialMovingAverage instance 14 | func NewEma() *ExponentialMovingAverage { 15 | return &ExponentialMovingAverage{2, 0.2} 16 | } 17 | 18 | // LagWindowSize sets the lagging window size. 19 | func (ema *ExponentialMovingAverage) LagWindowSize(size int) *ExponentialMovingAverage { 20 | ema.lagWindowSize = size 21 | return ema 22 | } 23 | 24 | // SmoothingFactor sets the smoothing factor. 25 | func (ema *ExponentialMovingAverage) SmoothingFactor(factor float64) *ExponentialMovingAverage { 26 | ema.smoothingFactor = factor 27 | return ema 28 | } 29 | 30 | // Run runs the exponential moving average algorithm over the time series 31 | func (ema *ExponentialMovingAverage) Run(timeSeries *TimeSeries) *ScoreList { 32 | scoreList, _ := ema.computeScores(timeSeries) 33 | return scoreList 34 | } 35 | 36 | func (ema *ExponentialMovingAverage) computeScores(timeSeries *TimeSeries) (*ScoreList, error) { 37 | stdev := timeSeries.Stdev() 38 | scores := mapSliceWithIndex(timeSeries.Values, func(idx int, value float64) float64 { 39 | score := 0.0 40 | if idx < ema.lagWindowSize { 41 | score = computeScoresInLagWindow(timeSeries.Values[:idx+1], value, ema.smoothingFactor) 42 | } else { 43 | score = computeScoresInLagWindow(timeSeries.Values[idx-ema.lagWindowSize:idx+1], value, ema.smoothingFactor) 44 | } 45 | 46 | if stdev > 0.0 { 47 | score = score / stdev 48 | } 49 | return score 50 | }) 51 | 52 | scoreList := &ScoreList{timeSeries.Timestamps, scores} 53 | return scoreList, nil 54 | } 55 | 56 | func computeScoresInLagWindow(data []float64, value, smoothingFactor float64) float64 { 57 | ema := Ema(data, smoothingFactor)[len(data)-1] 58 | return math.Abs(value - ema) 59 | } 60 | -------------------------------------------------------------------------------- /derivative.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "math" 4 | 5 | // Derivative holds the derivative algorithm configuration. 6 | // It uses the derivative of the current value as anomaly score. 7 | type Derivative struct { 8 | smoothingFactor float64 9 | } 10 | 11 | // NewDerivative return Derivative instance 12 | func NewDerivative() *Derivative { 13 | return &Derivative{0.2} 14 | } 15 | 16 | // SmoothingFactor sets the smoothing factor. 17 | func (d *Derivative) SmoothingFactor(factor float64) *Derivative { 18 | d.smoothingFactor = factor 19 | return d 20 | } 21 | 22 | // Run runs the derivative algorithm over the time series 23 | func (d *Derivative) Run(timeSeries *TimeSeries) *ScoreList { 24 | scoreList, _ := d.computeScores(timeSeries) 25 | return scoreList 26 | } 27 | 28 | func (d *Derivative) computeScores(timeSeries *TimeSeries) (*ScoreList, error) { 29 | derivatives := d.computeDerivatives(timeSeries) 30 | derivativesEma := Ema(derivatives, d.smoothingFactor) 31 | 32 | scores := mapSliceWithIndex(timeSeries.Values, func(i int, value float64) float64 { 33 | return math.Abs(derivatives[i] - derivativesEma[i]) 34 | }) 35 | 36 | stdev := Stdev(scores) 37 | if stdev != 0.0 { 38 | scores = mapSlice(scores, func(score float64) float64 { 39 | return score / stdev 40 | }) 41 | } 42 | scoreList := (&ScoreList{timeSeries.Timestamps, scores}).Denoise() 43 | return scoreList, nil 44 | } 45 | 46 | func (d *Derivative) computeDerivatives(timeSeries *TimeSeries) []float64 { 47 | zippedSeries := timeSeries.Zip() 48 | derivatives := make([]float64, 0, len(zippedSeries)) 49 | 50 | for i, timestamp := range timeSeries.Timestamps { 51 | if i > 0 { 52 | preTimestamp := timeSeries.Timestamps[i-1] 53 | preValue := zippedSeries[preTimestamp] 54 | 55 | currentValue := zippedSeries[timestamp] 56 | delta := timestamp - preTimestamp 57 | 58 | derivative := 0.0 59 | if delta != 0 { 60 | derivative = (currentValue - preValue) / delta 61 | } else { 62 | derivative = currentValue - preValue 63 | } 64 | derivatives = append(derivatives, math.Abs(derivative)) 65 | } 66 | } 67 | 68 | if len(derivatives) != 0 { 69 | derivatives = insertAt(derivatives, 0, derivatives[0]) 70 | } 71 | return derivatives 72 | } 73 | -------------------------------------------------------------------------------- /math_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "math" 5 | "testing" 6 | ) 7 | 8 | var input = []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} 9 | 10 | func TestAverage(t *testing.T) { 11 | actual := Average(input) 12 | expected := 5.5 13 | if actual != expected { 14 | t.Fatalf("expected %v, got %v", expected, actual) 15 | } 16 | } 17 | 18 | func TestSumFloat64s(t *testing.T) { 19 | actual := SumFloat64s(input) 20 | expected := 55.0 21 | if actual != expected { 22 | t.Fatalf("expected %v, got %v", expected, actual) 23 | } 24 | } 25 | 26 | func TestVariance(t *testing.T) { 27 | actual := Variance(input) 28 | expected := 8.25 29 | if actual != expected { 30 | t.Fatalf("expected %v, got %v", expected, actual) 31 | } 32 | } 33 | 34 | func TestStdev(t *testing.T) { 35 | actual := Stdev(input) 36 | expected := math.Sqrt(8.25) 37 | if actual != expected { 38 | t.Fatalf("expected %v, got %v", expected, actual) 39 | } 40 | } 41 | 42 | func TestRoundFloat(t *testing.T) { 43 | actual := RoundFloat(0.5) 44 | expected := 1 45 | if actual != expected { 46 | t.Fatalf("expected %v, got %v", expected, actual) 47 | } 48 | } 49 | 50 | func TestFloat64WithPrecision(t *testing.T) { 51 | actual := Float64WithPrecision(1.45424, 2) 52 | expected := 1.45 53 | if actual != expected { 54 | t.Fatalf("expected %v, got %v", expected, actual) 55 | } 56 | } 57 | 58 | func TestPdf(t *testing.T) { 59 | actual := Pdf(0.0, 1.0)(1.0) 60 | expected := 0.24197072451914337 61 | if actual != expected { 62 | t.Fatalf("expected %v, got %v", expected, actual) 63 | } 64 | } 65 | 66 | func TestCdf(t *testing.T) { 67 | actual := Cdf(0.0, 1.0)(1.0) 68 | expected := 0.8413447361676363 69 | if actual != expected { 70 | t.Fatalf("expected %v, got %v", expected, actual) 71 | } 72 | } 73 | 74 | func TestErf(t *testing.T) { 75 | actual := Erf(1.0) 76 | expected := 0.8427006897475899 77 | if actual != expected { 78 | t.Fatalf("expected %v, got %v", expected, actual) 79 | } 80 | 81 | actual = Erf(-1.0) 82 | expected = -expected 83 | if actual != expected { 84 | t.Fatalf("expected %v, got %v", expected, actual) 85 | } 86 | } 87 | 88 | func TestEma(t *testing.T) { 89 | data := []float64{0.5, 5.0, 2.0, 2.0} 90 | expected := Ema(data, 0.2) 91 | if len(data) != len(expected) { 92 | t.Fatalf("input and ema lenghts do not match") 93 | } 94 | } 95 | 96 | func TestAbsInt(t *testing.T) { 97 | if AbsInt(-5) != 5 { 98 | t.Fatalf("wrong absolute value") 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /detector.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | // Detector is the default anomaly detector 4 | type Detector struct { 5 | threshold float64 6 | timeSeries *TimeSeries 7 | } 8 | 9 | // NewDetector return an instance of the default detector. 10 | func NewDetector(ts *TimeSeries) *Detector { 11 | return &Detector{threshold: 2.0, timeSeries: ts} 12 | } 13 | 14 | // Threshold sets the threshold used by the detector. 15 | func (d *Detector) Threshold(threshold float64) *Detector { 16 | d.threshold = threshold 17 | return d 18 | } 19 | 20 | // GetScores runs the detector on the supplied time series. 21 | // It uses the Bitmap algorithm to calculate the score list and falls back 22 | // to the normal distribution algorithm in case of not enough data points in the time series. 23 | func (d *Detector) GetScores() *ScoreList { 24 | if scoreList := NewBitmap().Run(d.timeSeries); scoreList != nil { 25 | return scoreList 26 | } 27 | return NewWeightedSum().Run(d.timeSeries) 28 | } 29 | 30 | // GetAnomalies detects anomalies using the specified threshold on scores 31 | func (d *Detector) GetAnomalies(scoreList *ScoreList) []Anomaly { 32 | var ( 33 | zippedSeries = d.timeSeries.Zip() 34 | scores = scoreList.Zip() 35 | anomalies = make([]Anomaly, 0) 36 | intervals = make([]TimePeriod, 0) 37 | ) 38 | 39 | // Find all anomalies intervals 40 | var start, end float64 41 | for _, timestamp := range scoreList.Timestamps { 42 | if scores[timestamp] > d.threshold { 43 | end = timestamp 44 | if start == 0 { 45 | start = timestamp 46 | } 47 | } else if (start != 0) && (end != 0) { 48 | intervals = append(intervals, TimePeriod{start, end}) 49 | start = 0 50 | end = 0 51 | } 52 | } 53 | 54 | // Locate the exact anomaly timestamp within each interval 55 | for _, interval := range intervals { 56 | intervalSeries := d.timeSeries.Crop(interval.Start, interval.Start) 57 | refinedScoreList := NewEma().Run(intervalSeries) 58 | maxRefinedScore := refinedScoreList.Max() 59 | 60 | // Get timestamp of the maximal score 61 | if index := indexOf(refinedScoreList.Scores, maxRefinedScore); index != -1 { 62 | maxRefinedTimestamp := refinedScoreList.Timestamps[index] 63 | // Create the anomaly 64 | anomaly := Anomaly{ 65 | Timestamp: maxRefinedTimestamp, 66 | Value: zippedSeries[maxRefinedTimestamp], 67 | StartTimestamp: interval.Start, 68 | EndTimestamp: interval.End, 69 | Score: maxRefinedScore, 70 | threshold: d.threshold, 71 | } 72 | anomalies = append(anomalies, anomaly) 73 | } 74 | } 75 | return anomalies 76 | } 77 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/go,intellij,visualstudiocode 3 | # Edit at https://www.gitignore.io/?templates=go,intellij,visualstudiocode 4 | 5 | ### Go ### 6 | # Binaries for programs and plugins 7 | *.exe 8 | *.exe~ 9 | *.dll 10 | *.so 11 | *.dylib 12 | 13 | # Test binary, built with `go test -c` 14 | *.test 15 | 16 | # Output of the go coverage tool, specifically when used with LiteIDE 17 | *.out 18 | 19 | ### Go Patch ### 20 | /vendor/ 21 | /Godeps/ 22 | 23 | ### Intellij ### 24 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 25 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 26 | 27 | /.idea/ 28 | 29 | # User-specific stuff 30 | .idea/**/workspace.xml 31 | .idea/**/tasks.xml 32 | .idea/**/usage.statistics.xml 33 | .idea/**/dictionaries 34 | .idea/**/shelf 35 | 36 | # Generated files 37 | .idea/**/contentModel.xml 38 | 39 | # Sensitive or high-churn files 40 | .idea/**/dataSources/ 41 | .idea/**/dataSources.ids 42 | .idea/**/dataSources.local.xml 43 | .idea/**/sqlDataSources.xml 44 | .idea/**/dynamic.xml 45 | .idea/**/uiDesigner.xml 46 | .idea/**/dbnavigator.xml 47 | 48 | # Gradle 49 | .idea/**/gradle.xml 50 | .idea/**/libraries 51 | 52 | # Gradle and Maven with auto-import 53 | # When using Gradle or Maven with auto-import, you should exclude module files, 54 | # since they will be recreated, and may cause churn. Uncomment if using 55 | # auto-import. 56 | # .idea/modules.xml 57 | # .idea/*.iml 58 | # .idea/modules 59 | 60 | # CMake 61 | cmake-build-*/ 62 | 63 | # Mongo Explorer plugin 64 | .idea/**/mongoSettings.xml 65 | 66 | # File-based project format 67 | *.iws 68 | 69 | # IntelliJ 70 | out/ 71 | 72 | # mpeltonen/sbt-idea plugin 73 | .idea_modules/ 74 | 75 | # JIRA plugin 76 | atlassian-ide-plugin.xml 77 | 78 | # Cursive Clojure plugin 79 | .idea/replstate.xml 80 | 81 | # Crashlytics plugin (for Android Studio and IntelliJ) 82 | com_crashlytics_export_strings.xml 83 | crashlytics.properties 84 | crashlytics-build.properties 85 | fabric.properties 86 | 87 | # Editor-based Rest Client 88 | .idea/httpRequests 89 | 90 | # Android studio 3.1+ serialized cache file 91 | .idea/caches/build_file_checksums.ser 92 | 93 | # JetBrains templates 94 | **___jb_tmp___ 95 | 96 | ### Intellij Patch ### 97 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 98 | 99 | # *.iml 100 | # modules.xml 101 | # .idea/misc.xml 102 | # *.ipr 103 | 104 | # Sonarlint plugin 105 | .idea/sonarlint 106 | 107 | ### VisualStudioCode ### 108 | .vscode/* 109 | !.vscode/settings.json 110 | !.vscode/tasks.json 111 | !.vscode/launch.json 112 | !.vscode/extensions.json 113 | 114 | ### VisualStudioCode Patch ### 115 | # Ignore all local history of files 116 | .history 117 | 118 | # End of https://www.gitignore.io/api/go,intellij,visualstudiocode 119 | -------------------------------------------------------------------------------- /correlator_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "testing" 4 | 5 | func TestRunCorrelatorWithXCorr(t *testing.T) { 6 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 7 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 8 | 9 | coefficient := NewCorrelator(timeSeriesA, timeSeriesB). 10 | CorrelationMethod(XCorr, []float64{30, 0.01}). 11 | UseAnomalyScore(true). 12 | Run() 13 | if coefficient != 1.0 { 14 | t.Fatalf("incorrect coefficient: time series are exactly the same") 15 | } 16 | } 17 | 18 | func TestRunCorrelatorWithSpearmanRank(t *testing.T) { 19 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 20 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 21 | 22 | coefficient := NewCorrelator(timeSeriesA, timeSeriesB). 23 | CorrelationMethod(SpearmanRank, nil). 24 | TimePeriod(0, 2). 25 | Run() 26 | if coefficient != 1.0 { 27 | t.Fatalf("incorrect coefficient: time series are exactly the same") 28 | } 29 | } 30 | 31 | func TestRunCorrelatorWithPearson(t *testing.T) { 32 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 33 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5, 6, 7}, []float64{1, 2, -2, 4, 2, 3, 1, 0}) 34 | 35 | coefficient := NewCorrelator(timeSeriesA, timeSeriesB).CorrelationMethod(Pearson, nil).Run() 36 | if coefficient != 1.0 { 37 | t.Fatalf("incorrect coefficient: time series are exactly the same") 38 | } 39 | } 40 | 41 | func TestRunPearsonCorrelationWhenTimeSeriesHaveDifferentSizes(t *testing.T) { 42 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4}, []float64{0, 3.2, 5.5, 7.1, 8.9}) 43 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5}, []float64{-0.5, 1, 2.5, 4.1, 4.6, -1}) 44 | 45 | // Assert panic 46 | defer func() { 47 | if r := recover(); r == nil { 48 | t.Errorf("correlator did not panic") 49 | } 50 | }() 51 | 52 | NewCorrelator(timeSeriesA, timeSeriesB).CorrelationMethod(Pearson, nil).Run() 53 | } 54 | 55 | func TestXCorrelationWhenNotEnoughDataPoints(t *testing.T) { 56 | timeSeriesA := NewTimeSeries([]float64{0, 1}, []float64{0.5, 0}) 57 | timeSeriesB := NewTimeSeries([]float64{0}, []float64{0.5}) 58 | 59 | // Assert panic 60 | defer func() { 61 | if r := recover(); r == nil { 62 | t.Errorf("correlator did not panic") 63 | } 64 | }() 65 | 66 | NewCorrelator(timeSeriesA, timeSeriesB).UseAnomalyScore(true).Run() 67 | } 68 | 69 | func TestRunSpearmanCorrelationWhenTimeSeriesHaveDifferentSizes(t *testing.T) { 70 | timeSeriesA := NewTimeSeries([]float64{0, 1, 2, 3, 4}, []float64{0, 3.2, 5.5, 7.1, 8.9}) 71 | timeSeriesB := NewTimeSeries([]float64{0, 1, 2, 3, 4, 5}, []float64{-0.5, 1, 2.5, 4.1, 4.6, -1}) 72 | 73 | // Assert panic 74 | defer func() { 75 | if r := recover(); r == nil { 76 | t.Errorf("correlator did not panic") 77 | } 78 | }() 79 | 80 | NewCorrelator(timeSeriesA, timeSeriesB).CorrelationMethod(SpearmanRank, nil).Run() 81 | } 82 | -------------------------------------------------------------------------------- /correlator.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | // CorrelationAlgorithm base interface for correlation algorithms. 4 | type CorrelationAlgorithm interface { 5 | Run() float64 6 | sanityCheck() error 7 | } 8 | 9 | // CorrelationMethod type checker for correlation method 10 | type CorrelationMethod int32 11 | 12 | const ( 13 | // XCorr represents the Cross Correlation algorithm. 14 | XCorr CorrelationMethod = iota 15 | // SpearmanRank represents the Spearman Rank Correlation algorithm. 16 | SpearmanRank 17 | // Pearson represents the Pearson Correlation algorithm. 18 | Pearson 19 | ) 20 | 21 | // Correlator holds the correlator configuration. 22 | type Correlator struct { 23 | current, target *TimeSeries 24 | algorithm CorrelationAlgorithm 25 | useAnomalyScore bool 26 | } 27 | 28 | // NewCorrelator returns an instance of the correlation algorithm. 29 | func NewCorrelator(current, target *TimeSeries) *Correlator { 30 | if current == nil || target == nil { 31 | panic("either current or target time series cannot be nil") 32 | } 33 | return &Correlator{ 34 | current: current, 35 | target: target, 36 | } 37 | } 38 | 39 | // CorrelationMethod specifies which correlation method to use (XCross or SpearmanRank). 40 | func (c *Correlator) CorrelationMethod(method CorrelationMethod, options []float64) *Correlator { 41 | c.algorithm = c.getCorrelationAlgorithmByMethod(method, options) 42 | return c 43 | } 44 | 45 | // TimePeriod crops the current and target time series to specified range. 46 | func (c *Correlator) TimePeriod(start, end float64) *Correlator { 47 | c.current = c.current.Crop(start, end) 48 | c.target = c.target.Crop(start, end) 49 | return c 50 | } 51 | 52 | // UseAnomalyScore tells the correlator to calculate anomaly scores from both time series. 53 | func (c *Correlator) UseAnomalyScore(use bool) *Correlator { 54 | c.useAnomalyScore = use 55 | return c 56 | } 57 | 58 | // Run runs the correlator. 59 | func (c *Correlator) Run() float64 { 60 | if err := c.algorithm.sanityCheck(); err != nil { 61 | panic(err) 62 | } 63 | 64 | if c.useAnomalyScore { 65 | c.current = getAnomalyScores(NewDetector(c.current)) 66 | c.target = getAnomalyScores(NewDetector(c.target)) 67 | } 68 | 69 | return c.algorithm.Run() 70 | } 71 | 72 | func (c *Correlator) getCorrelationAlgorithmByMethod(method CorrelationMethod, options []float64) CorrelationAlgorithm { 73 | var algorithm CorrelationAlgorithm 74 | switch method { 75 | case XCorr: 76 | algorithm = NewCrossCorrelation(c.current, c.target) 77 | if options != nil && len(options) > 0 { 78 | algorithm = algorithm.(*CrossCorrelation).MaxShift(options[0]).Impact(options[1]) 79 | } 80 | case SpearmanRank: 81 | algorithm = NewSpearmanCorrelation(c.current, c.target) 82 | case Pearson: 83 | algorithm = NewPearsonCorrelation(c.current, c.target) 84 | default: 85 | panic("unsupported correlation method/algorithm") 86 | } 87 | return algorithm 88 | } 89 | 90 | func getAnomalyScores(detector *Detector) *TimeSeries { 91 | scoreList := detector.GetScores() 92 | if scoreList == nil { 93 | panic("failed to calculate anomaly scores") 94 | } 95 | return &TimeSeries{scoreList.Timestamps, scoreList.Scores} 96 | } 97 | -------------------------------------------------------------------------------- /stl.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import "github.com/project-anomalia/stl" 4 | 5 | type STLMethod int32 6 | 7 | const ( 8 | // Additive method suggests that the components are added together (linear model). 9 | Additive STLMethod = iota 10 | 11 | // Multiplicative method suggests that the components are multiplied together (non-linear model). 12 | Multiplicative 13 | ) 14 | 15 | // STL holds Seasonal-Trend With Loess algorithm configuration. 16 | // 17 | // The STL algorithm decomposes a time series into seasonal, trend and remainder components. 18 | // The paper describing this algorithm can found here: https://search.proquest.com/openview/cc5001e8a0978a6c029ae9a41af00f21 19 | type STL struct { 20 | periodicity int 21 | width int 22 | robustIterations stl.Opt 23 | iterations stl.Opt 24 | seasonalConfig *stl.Config 25 | trendConfig *stl.Config 26 | lowPassFilterConfig *stl.Config 27 | method stl.ModelType 28 | } 29 | 30 | // NewSTL returns an instance of the STL struct. 31 | func NewSTL() *STL { 32 | return &STL{ 33 | robustIterations: stl.WithRobustIter(0), 34 | iterations: stl.WithIter(2), 35 | method: stl.Additive(), 36 | } 37 | } 38 | 39 | func (s *STL) Periodicity(p int) *STL { 40 | s.periodicity = p 41 | return s 42 | } 43 | 44 | func (s *STL) Width(w int) *STL { 45 | s.width = w 46 | return s 47 | } 48 | 49 | func (s *STL) RobustIterations(n int) *STL { 50 | s.robustIterations = stl.WithRobustIter(n) 51 | return s 52 | } 53 | 54 | func (s *STL) Iterations(n int) *STL { 55 | s.iterations = stl.WithIter(n) 56 | return s 57 | } 58 | 59 | func (s *STL) SeasonalConfig(config *stl.Config) *STL { 60 | s.seasonalConfig = config 61 | return s 62 | } 63 | 64 | func (s *STL) TrendConfig(config *stl.Config) *STL { 65 | s.trendConfig = config 66 | return s 67 | } 68 | 69 | func (s *STL) LowPassFilterConfig(config *stl.Config) *STL { 70 | s.lowPassFilterConfig = config 71 | return s 72 | } 73 | 74 | func (s *STL) MethodType(method STLMethod) *STL { 75 | switch method { 76 | case Additive: 77 | s.method = stl.Additive() 78 | case Multiplicative: 79 | s.method = stl.Multiplicative() 80 | default: 81 | panic("invalid STL method type") 82 | } 83 | return s 84 | } 85 | 86 | // Run runs the STL algorithm over the time series. 87 | func (s *STL) Run(timeSeries *TimeSeries) *ScoreList { 88 | scoreList, _ := s.computeScores(timeSeries) 89 | return scoreList 90 | } 91 | 92 | func (s *STL) computeScores(timeSeries *TimeSeries) (*ScoreList, error) { 93 | options := []stl.Opt{s.iterations, s.robustIterations} 94 | 95 | if s.seasonalConfig != nil { 96 | options = append(options, stl.WithSeasonalConfig(*s.seasonalConfig)) 97 | } 98 | 99 | if s.trendConfig != nil { 100 | options = append(options, stl.WithTrendConfig(*s.trendConfig)) 101 | } 102 | 103 | if s.lowPassFilterConfig != nil { 104 | options = append(options, stl.WithLowpassConfig(*s.lowPassFilterConfig)) 105 | } 106 | 107 | result := stl.Decompose(timeSeries.Values, s.periodicity, s.width, s.method, options...) 108 | if result.Err != nil { 109 | return nil, result.Err 110 | } 111 | return &ScoreList{timeSeries.Timestamps, result.Resid}, nil 112 | } 113 | -------------------------------------------------------------------------------- /spearman_correlation.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "errors" 5 | "sort" 6 | ) 7 | 8 | // SpearmanCorrelation holds the Spearman Correlation algorithm configuration. 9 | // It is the non-parametric version of the Pearson correlation and it should be used 10 | // when the time series distribution is unknown or not normally distributed. 11 | // 12 | // Spearman’s correlator returns a value from -1 to 1, where: 13 | // - +1 = a perfect positive correlation between ranks 14 | // - -1 = a perfect negative correlation between ranks 15 | // - 0 = no correlation between ranks. 16 | type SpearmanCorrelation struct { 17 | current, target *TimeSeries 18 | } 19 | 20 | type rank struct{ x, y, xRank, yRank float64 } 21 | 22 | // NewSpearmanCorrelation returns an instance of the spearman correlation struct. 23 | func NewSpearmanCorrelation(current, target *TimeSeries) *SpearmanCorrelation { 24 | return &SpearmanCorrelation{current, target} 25 | } 26 | 27 | // Run runs the spearman correlation on the current and target time series. 28 | func (sc *SpearmanCorrelation) Run() float64 { 29 | // Build up the ranks slice 30 | ranks := make([]rank, sc.current.Size()) 31 | for index, currentValue := range sc.current.Values { 32 | ranks[index] = rank{x: currentValue, y: sc.target.Values[index]} 33 | } 34 | 35 | // Sort the ranks by x 36 | sort.Slice(ranks, func(i, j int) bool { return ranks[i].x < ranks[j].x }) 37 | 38 | // Rank the current series 39 | for pos := 0; pos < len(ranks); pos++ { 40 | ranks[pos].xRank = float64(pos) + 1 41 | duplicateValues := []int{pos} 42 | for nested, p := range ranks { 43 | if ranks[pos].x == p.x { 44 | if pos != nested { 45 | duplicateValues = append(duplicateValues, nested) 46 | } 47 | } 48 | } 49 | 50 | sum := SumInts(duplicateValues) 51 | avg := float64(sum+len(duplicateValues)) / float64(len(duplicateValues)) 52 | ranks[pos].xRank = avg 53 | for index := 1; index < len(duplicateValues); index++ { 54 | ranks[duplicateValues[index]].xRank = avg 55 | } 56 | pos += len(duplicateValues) - 1 57 | } 58 | 59 | // Sort the ranks by y 60 | sort.Slice(ranks, func(i int, j int) bool { return ranks[i].y < ranks[j].y }) 61 | 62 | // Rank the target series 63 | for pos := 0; pos < len(ranks); pos++ { 64 | ranks[pos].yRank = float64(pos) + 1 65 | duplicateValues := []int{pos} 66 | for nested, p := range ranks { 67 | if ranks[pos].y == p.y { 68 | if pos != nested { 69 | duplicateValues = append(duplicateValues, nested) 70 | } 71 | } 72 | } 73 | 74 | sum := SumInts(duplicateValues) 75 | avg := float64(sum+len(duplicateValues)) / float64(len(duplicateValues)) 76 | ranks[pos].yRank = avg 77 | for index := 1; index < len(duplicateValues); index++ { 78 | ranks[duplicateValues[index]].yRank = avg 79 | } 80 | pos += len(duplicateValues) - 1 81 | } 82 | 83 | // Adapt both current and target series 84 | for index, rank := range ranks { 85 | sc.current.Values[index] = rank.xRank 86 | sc.target.Values[index] = rank.yRank 87 | } 88 | 89 | return NewPearsonCorrelation(sc.current, sc.target).Run() 90 | } 91 | 92 | func (sc *SpearmanCorrelation) sanityCheck() error { 93 | if sc.current.Size() < 3 || sc.current.Size() != sc.target.Size() { 94 | return errors.New("current and/or target series have an invalid dimension") 95 | } 96 | return nil 97 | } 98 | -------------------------------------------------------------------------------- /helpers.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "sort" 5 | "sync" 6 | ) 7 | 8 | type mapper func(float64) float64 9 | type mapperWithIndex func(int, float64) float64 10 | type predicate func(float64) bool 11 | 12 | func minMax(data []float64) (float64, float64) { 13 | var ( 14 | max = data[0] 15 | min = data[0] 16 | ) 17 | for _, value := range data { 18 | if max < value { 19 | max = value 20 | } 21 | if min > value { 22 | min = value 23 | } 24 | } 25 | return min, max 26 | } 27 | 28 | func mapSlice(slice []float64, m mapper) []float64 { 29 | var ( 30 | wg sync.WaitGroup 31 | result = make([]float64, len(slice)) 32 | ) 33 | 34 | wg.Add(len(slice)) 35 | for i, value := range slice { 36 | go func(i int, value float64) { 37 | defer wg.Done() 38 | result[i] = m(value) 39 | }(i, value) 40 | } 41 | wg.Wait() 42 | 43 | return result 44 | } 45 | 46 | func mapSliceWithIndex(slice []float64, m mapperWithIndex) []float64 { 47 | var ( 48 | wg sync.WaitGroup 49 | result = make([]float64, len(slice)) 50 | ) 51 | 52 | wg.Add(len(slice)) 53 | for idx, value := range slice { 54 | go func(idx int, value float64) { 55 | defer wg.Done() 56 | result[idx] = m(idx, value) 57 | }(idx, value) 58 | } 59 | wg.Wait() 60 | 61 | return result 62 | } 63 | 64 | func filter(slice []float64, predicate predicate) (ret []float64) { 65 | for _, value := range slice { 66 | if predicate(value) { 67 | ret = append(ret, value) 68 | } 69 | } 70 | return 71 | } 72 | 73 | func copySlice(input []float64) []float64 { 74 | s := make([]float64, len(input)) 75 | copy(s, input) 76 | return s 77 | } 78 | 79 | func sortedCopy(input []float64) (copy []float64) { 80 | copy = copySlice(input) 81 | sort.Float64s(copy) 82 | return 83 | } 84 | 85 | func insertAt(slice []float64, pos int, elem float64) []float64 { 86 | if pos < 0 { 87 | pos = 0 88 | } else if pos >= len(slice) { 89 | pos = len(slice) 90 | } 91 | out := make([]float64, len(slice)+1) 92 | copy(out[:pos], slice[:pos]) 93 | out[pos] = elem 94 | copy(out[pos+1:], slice[pos:]) 95 | return out 96 | } 97 | 98 | func mapIntKeys(dict map[int]float64) []int { 99 | keys := make([]int, len(dict)) 100 | i := 0 101 | for key := range dict { 102 | keys[i] = key 103 | i++ 104 | } 105 | sort.Ints(keys) 106 | return keys 107 | } 108 | 109 | func mapFloat64Keys(m map[float64]float64) []float64 { 110 | keys := make([]float64, len(m)) 111 | i := 0 112 | for key := range m { 113 | keys[i] = key 114 | i++ 115 | } 116 | sort.Float64s(keys) 117 | return keys 118 | } 119 | 120 | func indexOf(slice []float64, value float64) int { 121 | for idx := range slice { 122 | if slice[idx] == value { 123 | return idx 124 | } 125 | } 126 | return -1 127 | } 128 | 129 | func unpackMap(m map[float64]float64) ([]float64, []float64) { 130 | keys := mapFloat64Keys(m) 131 | values := make([]float64, len(keys)) 132 | for idx, key := range keys { 133 | values[idx] = m[key] 134 | } 135 | return keys, values 136 | } 137 | 138 | func sumOfSquares(s []float64) float64 { 139 | sum := 0.0 140 | for _, val := range s { 141 | sum += val * val 142 | } 143 | return sum 144 | } 145 | 146 | func sumOfProducts(s1 []float64, s2 []float64) float64 { 147 | sum := 0.0 148 | for i := range s1 { 149 | sum += s1[i] * s2[i] 150 | } 151 | return sum 152 | } 153 | -------------------------------------------------------------------------------- /math.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "math" 5 | "math/rand" 6 | ) 7 | 8 | // Average returns the average of the input 9 | func Average(input []float64) float64 { 10 | return SumFloat64s(input) / float64(len(input)) 11 | } 12 | 13 | // SumFloat64s returns the sum of all float64 in the input 14 | func SumFloat64s(input []float64) float64 { 15 | var sum float64 16 | for _, value := range input { 17 | sum += value 18 | } 19 | return sum 20 | } 21 | 22 | // SumInts returns the sum of all integers in the input. 23 | func SumInts(input []int) int { 24 | var sum int 25 | for _, value := range input { 26 | sum += value 27 | } 28 | return sum 29 | } 30 | 31 | // Variance returns the variance of the input 32 | func Variance(input []float64) (variance float64) { 33 | avg := Average(input) 34 | for _, value := range input { 35 | variance += (value - avg) * (value - avg) 36 | } 37 | return variance / float64(len(input)) 38 | } 39 | 40 | // Stdev returns the standard deviation of the input 41 | func Stdev(input []float64) float64 { 42 | variance := Variance(input) 43 | return math.Pow(variance, 0.5) 44 | } 45 | 46 | // RoundFloat rounds float to closest int 47 | func RoundFloat(num float64) int { 48 | return int(num + math.Copysign(0.5, num)) 49 | } 50 | 51 | // Float64WithPrecision rounds float to certain precision 52 | func Float64WithPrecision(num float64, precision int) float64 { 53 | output := math.Pow(10, float64(precision)) 54 | return float64(RoundFloat(num*output)) / output 55 | } 56 | 57 | // Pdf returns the probability density function 58 | func Pdf(mean, stdev float64) func(float64) float64 { 59 | return func(x float64) float64 { 60 | numexp := math.Pow(x-mean, 2) / (2 * math.Pow(stdev, 2)) 61 | denom := stdev * math.Sqrt(2*math.Pi) 62 | numer := math.Pow(math.E, numexp*-1) 63 | return numer / denom 64 | } 65 | } 66 | 67 | // Cdf returns the cumulative distribution function 68 | func Cdf(mean, stdev float64) func(float64) float64 { 69 | return func(x float64) float64 { 70 | return 0.5 * (1.0 + Erf((x-mean)/(stdev*math.Sqrt(2.0)))) 71 | } 72 | } 73 | 74 | // Erf is the guassian error function 75 | func Erf(x float64) float64 { 76 | // Constants 77 | a1 := 0.254829592 78 | a2 := -0.284496736 79 | a3 := 1.421413741 80 | a4 := -1.453152027 81 | a5 := 1.061405429 82 | p := 0.3275911 83 | 84 | // Save the sign of x 85 | var sign float64 86 | if x < 0.0 { 87 | sign = -1.0 88 | } else { 89 | sign = 1.0 90 | } 91 | x = math.Abs(x) 92 | 93 | // Formula 7.1.26 given in Abramowitz and Stegun 94 | t := 1.0 / (1.0 + p*x) 95 | y := 1.0 - ((((a5*t+a4)*t+a3)*t+a2)*t+a1)*t*math.Pow(math.E, -x*x) 96 | return sign * y 97 | } 98 | 99 | // Ema returns the exponnential moving average of the input 100 | func Ema(input []float64, smoothingFactor float64) []float64 { 101 | ema := make([]float64, 0, len(input)) 102 | if len(input) > 0 { 103 | ema = append(ema, input[0]) 104 | } 105 | 106 | for i := 1; i < len(input); i++ { 107 | ema = append(ema, smoothingFactor*input[i]+(1-smoothingFactor)*ema[i-1]) 108 | } 109 | return ema 110 | } 111 | 112 | // RandomSineValue returns sine of value between [0, limit] using a rand source 113 | func RandomSineValue(rand *rand.Rand, limit int) float64 { 114 | var ( 115 | frequency = 440 116 | samplingRate = 44100 117 | signalAmplitude = 10 118 | ) 119 | x := 2 * math.Pi * float64(frequency*rand.Intn(limit)) / float64(samplingRate) 120 | return math.Abs(math.Sin(x) * float64(signalAmplitude)) 121 | } 122 | 123 | // AbsInt returns the absolute value of an integer. 124 | func AbsInt(x int) int { 125 | if x < 0 { 126 | return -x 127 | } 128 | return x 129 | } 130 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # anomalia 2 | 3 | `anomalia` is a lightweight Go library for Time Series data analysis. 4 | 5 | It supports anomaly detection and correlation. The API is simple and configurable in the sense that you can choose which algorithm suits your needs for anomaly detection and correlation. 6 | 7 | :construction: **The library is currently under development so things might move or change!** 8 | 9 | ## Installation 10 | 11 | Installation is done using `go get`: 12 | 13 | ```shell 14 | go get -u github.com/project-anomalia/anomalia 15 | ``` 16 | 17 | ## Supported Go Versions 18 | 19 | `anomalia` supports `Go >= 1.10`. 20 | 21 | ## Documentation 22 | 23 | ### Quick Start Guide 24 | 25 | This is a simple example to get you up & running with the library: 26 | 27 | ```go 28 | package main 29 | 30 | import ( 31 | "fmt" 32 | "github.com/project-anomalia/anomalia" 33 | ) 34 | 35 | func main() { 36 | // Load the time series from an external source. 37 | // It returns an instance of TimeSeries struct which holds the timestamps and their values. 38 | timeSeries := anomalia.NewTimeSeriesFromCSV("testdata/co2.csv") 39 | 40 | // Instantiate the default detector which uses a threshold to determines anomalies. 41 | // Anomalies are data points that have a score above the threshold (2.5 in this case). 42 | detector := anomalia.NewDetector(timeSeries).Threshold(2.5) 43 | 44 | // Calculate the scores for each data point in the time series 45 | scores := detector.GetScores() 46 | 47 | // Find anomalies based the calculated scores 48 | anomalies := detector.GetAnomalies(scores) 49 | 50 | // Iterate over detected anomalies and print their exact timestamp and value. 51 | for _, anomaly := range anomalies { 52 | fmt.Println(anomaly.Timestamp, ",", anomaly.Value) 53 | } 54 | } 55 | ``` 56 | 57 | The example above uses some preset algorithms to calculate the scores. It might not be suited for your case but you can 58 | use any of the available algorithms. 59 | 60 | All algorithms follow a straightforward design so you could get the scores based on your configuration and understanding 61 | of the data, and pass those scores to `Detector.GetAnomalies(*ScoreList)` function. 62 | 63 | 64 | And another example to check if two time series have a relationship or correlated: 65 | 66 | ```go 67 | package main 68 | 69 | import "github.com/project-anomalia/anomalia" 70 | 71 | func main() { 72 | a := anomalia.NewTimeSeriesFromCSV("testdata/co2.csv") 73 | b := anomalia.NewTimeSeriesFromCSV("testdata/airline-passengers.csv") 74 | 75 | // If the time series data points do not follow a certain distribution, 76 | // we use the Spearman correlator. 77 | coefficient := anomalia.NewCorrelator(a, b).CorrelationMethod(anomalia.SpearmanRank, nil).Run() 78 | 79 | // If the coefficient is above a certain threshold (0.7 for example), we consider 80 | // the time series correlated. 81 | if coefficient < 0.7 { 82 | panic("no relationship between the two time series") 83 | } 84 | } 85 | ``` 86 | 87 | If the correlation algorithm accepts any additional parameters (see different implementations), you can pass them as a 88 | `float64` slice to the `CorrelationMethod(method, options)` method. 89 | 90 | ## Roadmap 91 | 92 | - CLI tool for rapid experimentation 93 | - Benchmarks 94 | 95 | ## Resources 96 | 97 | TODO 98 | 99 | ## License 100 | 101 | ```text 102 | Copyright 2019 Faissal Elamraoui 103 | 104 | Licensed under the Apache License, Version 2.0 (the "License"); 105 | you may not use this file except in compliance with the License. 106 | You may obtain a copy of the License at 107 | 108 | http://www.apache.org/licenses/LICENSE-2.0 109 | 110 | Unless required by applicable law or agreed to in writing, software 111 | distributed under the License is distributed on an "AS IS" BASIS, 112 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 113 | See the License for the specific language governing permissions and 114 | limitations under the License. 115 | ``` 116 | -------------------------------------------------------------------------------- /time_series_test.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "math/big" 5 | "reflect" 6 | "testing" 7 | ) 8 | 9 | var ( 10 | timestamps = []float64{2, 3, 5, 8, 9, 10, 15} 11 | values = []float64{1.0, 0.6, 2.5, 1.9, 0.3, 3.2, 0} 12 | ) 13 | 14 | func TestNewTimeSeries(t *testing.T) { 15 | ts := NewTimeSeries(timestamps, values) 16 | actualType := "*anomalia.TimeSeries" 17 | expectedType := reflect.TypeOf(ts).String() 18 | if expectedType != actualType { 19 | t.Fatalf("expected '%s', got '%s'", expectedType, actualType) 20 | } 21 | 22 | // Assert panic 23 | defer func() { 24 | if r := recover(); r == nil { 25 | t.Errorf("NewTimeSeries did not panic") 26 | } 27 | }() 28 | NewTimeSeries([]float64{1, 2}, []float64{1}) 29 | } 30 | 31 | func TestEarliestTimestamp(t *testing.T) { 32 | timestamp := NewTimeSeries(timestamps, values).EarliestTimestamp() 33 | actual := big.NewFloat(timestamp) 34 | expected := big.NewFloat(2.0) 35 | if actual.Cmp(expected) != 0 { 36 | t.Fatalf("expected '%f', got '%f'", expected, actual) 37 | } 38 | } 39 | 40 | func TestLastestTimestamp(t *testing.T) { 41 | timestamp := NewTimeSeries(timestamps, values).LastestTimestamp() 42 | actual := big.NewFloat(timestamp) 43 | expected := big.NewFloat(15.0) 44 | if actual.Cmp(expected) != 0 { 45 | t.Fatalf("expected '%f', got '%f'", expected, actual) 46 | } 47 | } 48 | 49 | func TestZipTimeSeries(t *testing.T) { 50 | ts := NewTimeSeries(timestamps, values) 51 | expected := ts.Zip() 52 | if (len(expected) != len(ts.Timestamps)) || (len(expected) != len(ts.Values)) { 53 | t.Fatalf("time series lengths do not match") 54 | } 55 | } 56 | 57 | func TestAddOffsetToTimeSeries(t *testing.T) { 58 | ts := NewTimeSeries(timestamps, values) 59 | offsetted := ts.AddOffset(1) 60 | if len(ts.Timestamps) != len(offsetted.Timestamps) { 61 | t.Fatalf("offsetted time series length do not match") 62 | } 63 | } 64 | 65 | func TestNormalize(t *testing.T) { 66 | ts := NewTimeSeries(timestamps, values).Normalize() 67 | if ts == nil { 68 | t.Fatalf("normalized time series cannot be nil") 69 | } 70 | } 71 | 72 | func TestNormalizeWithMinMax(t *testing.T) { 73 | ts := NewTimeSeries(timestamps, values).NormalizeWithMinMax() 74 | if ts == nil { 75 | t.Fatalf("minMax normalized time series cannot be nil") 76 | } 77 | } 78 | 79 | func TestCrop(t *testing.T) { 80 | ts := NewTimeSeries(timestamps, values).Crop(0, 4) 81 | if len(ts.Timestamps) != 2 || len(ts.Values) != 2 { 82 | t.Fatalf("expected size to be 2, got %v", len(ts.Timestamps)) 83 | } 84 | } 85 | 86 | func TestTimeSeriesAverage(t *testing.T) { 87 | actual := Float64WithPrecision(NewTimeSeries(timestamps, values).Average(), 2) 88 | expected := Float64WithPrecision(1.36, 2) 89 | if actual != expected { 90 | t.Fatalf("expected %f, got %f", expected, actual) 91 | } 92 | } 93 | 94 | func TestMedian(t *testing.T) { 95 | ts := NewTimeSeries(timestamps, values) 96 | actual := big.NewFloat(ts.Median()) 97 | expected := big.NewFloat(1.0) 98 | if actual.Cmp(expected) != 0 { 99 | t.Fatalf("expected %f, got %f", expected, actual) 100 | } 101 | 102 | ts = ts.Crop(0, 8) 103 | actual = big.NewFloat(ts.Median()) 104 | expected = big.NewFloat(1.45) 105 | if actual.Cmp(expected) != 0 { 106 | t.Fatalf("expected %f, got %f", expected, actual) 107 | } 108 | } 109 | 110 | func TestAlign1(t *testing.T) { 111 | ts := NewTimeSeries([]float64{4, 5, 6, 7, 8, 15}, []float64{1.2, 0, 1, 0.5, 4, 7}) 112 | otherTs := NewTimeSeries([]float64{1, 2, 3}, []float64{0.9, 10.1, 5.4}) 113 | 114 | ts.Align(otherTs) 115 | 116 | if ts.Size() != otherTs.Size() { 117 | t.Fatalf("time series size mismatch") 118 | } 119 | } 120 | 121 | func TestAlign2(t *testing.T) { 122 | ts := NewTimeSeries([]float64{1, 2, 3, 4}, []float64{0.1, 0.2, 0.3, 9.8}) 123 | otherTs := NewTimeSeries([]float64{4, 5, 6, 7, 8, 15}, []float64{1.2, 0, 1, 0.5, 4, 7}) 124 | 125 | ts.Align(otherTs) 126 | 127 | if ts.Size() != otherTs.Size() { 128 | t.Fatalf("time series size mismatch") 129 | } 130 | } 131 | 132 | func TestString(t *testing.T) { 133 | json := (&TimeSeries{[]float64{1, 2, 3}, []float64{1.5, 0, 1}}).String() 134 | expected := "{\"Timestamps\":[1,2,3],\"Values\":[1.5,0,1]}" 135 | if json != expected { 136 | t.Fatalf("incorrect time series string representation") 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/chewxy/hm v1.0.0 h1:zy/TSv3LV2nD3dwUEQL2VhXeoXbb9QkpmdRAVUFiA6k= 2 | github.com/chewxy/hm v1.0.0/go.mod h1:qg9YI4q6Fkj/whwHR1D+bOGeF7SniIP40VweVepLjg0= 3 | github.com/chewxy/math32 v1.0.0 h1:RTt2SACA7BTzvbsAKVQJLZpV6zY2MZw4bW9L2HEKkHg= 4 | github.com/chewxy/math32 v1.0.0/go.mod h1:Miac6hA1ohdDUTagnvJy/q+aNnEk16qWUdb8ZVhvCN0= 5 | github.com/chewxy/stl v1.3.1 h1:5IblZvGtZGzYL2xN6ZJ1uIyKauYmKK42axeeHQRf5ig= 6 | github.com/chewxy/stl v1.3.1/go.mod h1:rahF/zUIXOWT3RDM7k/ytPYqf+gSbUh+Px6EyhE4R0M= 7 | github.com/gogo/protobuf v1.2.1 h1:/s5zKNz0uPFCZ5hddgPdo2TK2TVrUNMn0OOX8/aZMTE= 8 | github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= 9 | github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= 10 | github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 11 | github.com/google/flatbuffers v1.11.0 h1:G1UwqvkT/emTpyR05i/JSS7K1VjNGxsFDg/zwRVcJvw= 12 | github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= 13 | github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= 14 | github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= 15 | github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= 16 | github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= 17 | github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 18 | github.com/project-anomalia/stl v1.3.1 h1:tSvE68Uk6dj3hGK5ocnQbzqMcuQF96XLze+LXm2ubdc= 19 | github.com/project-anomalia/stl v1.3.1/go.mod h1:/S7X3u1BXlL5Po+Z71paWUrb3ABB6AUxl6S3X4L5pPg= 20 | github.com/xtgo/set v1.0.0 h1:6BCNBRv3ORNDQ7fyoJXRv+tstJz3m1JVFQErfeZz2pY= 21 | github.com/xtgo/set v1.0.0/go.mod h1:d3NHzGzSa0NmB2NhFyECA+QdRp29oEn2xbT+TpeFoM8= 22 | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= 23 | golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= 24 | golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= 25 | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= 26 | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 27 | golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 28 | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 29 | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 30 | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 31 | golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 32 | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 33 | golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= 34 | golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 35 | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 36 | golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 37 | golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 38 | golang.org/x/tools v0.0.0-20190703172252-a00916dd39a5/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= 39 | gonum.org/v1/gonum v0.0.0-20190628223043-536a303fd62f h1:XQilCFQ9tBhWe+GOeWGaO9W9iLIjcQDkSB8a5t4bT1U= 40 | gonum.org/v1/gonum v0.0.0-20190628223043-536a303fd62f/go.mod h1:03dgh78c4UvU1WksguQ/lvJQXbezKQGJSrwwRq5MraQ= 41 | gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= 42 | gorgonia.org/tensor v0.8.1 h1:PTJ81ku5uYs/qsZLMFq02q0DWI4YuJeu0ikieFkkh1o= 43 | gorgonia.org/tensor v0.8.1/go.mod h1:05Y4laKuVlj4qFoZIZW1q/9n1jZkgDBOLmKXZdBLG1w= 44 | gorgonia.org/vecf32 v0.7.0 h1:mkpVzSyT7/Cput5/ZxaMzzp2xbmOtqOyJlTf7AdSMe0= 45 | gorgonia.org/vecf32 v0.7.0/go.mod h1:iHG+kvTMqGYA0SgahfO2k62WRnxmHsqAREGbayRDzy8= 46 | gorgonia.org/vecf64 v0.7.0 h1:ZphOGJfnWlFfY7x8WAJAfO64IAtYqPPq9TEGem+ItZE= 47 | gorgonia.org/vecf64 v0.7.0/go.mod h1:1y4pmcSd+wh3phG+InwWQjYrqwyrtN9h27WLFVQfV1Q= 48 | -------------------------------------------------------------------------------- /cross_correlation.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "errors" 5 | "math" 6 | ) 7 | 8 | // CrossCorrelation holds Cross Correlation algorithm parameters and settings. 9 | // It is calculated by multiplying and summing the current and target time series together. 10 | // 11 | // This implementation uses normalized time series which makes scoring easy to understand: 12 | // - The higher the coefficient, the higher the correlation is. 13 | // - The maximum value of the correlation coefficient is 1. 14 | // - The minimum value of the correlation coefficient is -1. 15 | // - Two time series are exactly the same when their correlation coefficient is equal to 1. 16 | type CrossCorrelation struct { 17 | current, target *TimeSeries 18 | maxShift float64 19 | impact float64 20 | } 21 | 22 | // CorrelationResult holds detected correlation result. 23 | type CorrelationResult struct { 24 | Shift float64 25 | Coefficient float64 26 | ShiftedCoefficient float64 27 | } 28 | 29 | // NewCrossCorrelation returns an instance of the cross correlation struct. 30 | func NewCrossCorrelation(current *TimeSeries, target *TimeSeries) *CrossCorrelation { 31 | return &CrossCorrelation{ 32 | current: current, 33 | target: target, 34 | maxShift: 60 * 1000, 35 | impact: 0.05, 36 | } 37 | } 38 | 39 | // MaxShift sets the maximal shift in seconds. 40 | func (cc *CrossCorrelation) MaxShift(shift float64) *CrossCorrelation { 41 | cc.maxShift = shift * 1000 42 | return cc 43 | } 44 | 45 | // Impact sets impact of shift on shifted correlation coefficient. 46 | func (cc *CrossCorrelation) Impact(impact float64) *CrossCorrelation { 47 | cc.impact = impact 48 | return cc 49 | } 50 | 51 | // GetCorrelationResult runs the cross correlation algorithm. 52 | func (cc *CrossCorrelation) GetCorrelationResult() CorrelationResult { 53 | return cc.detectCorrelation() 54 | } 55 | 56 | // Run runs the cross correlation algorithm and returns only the coefficient. 57 | func (cc *CrossCorrelation) Run() float64 { 58 | return cc.GetCorrelationResult().Coefficient 59 | } 60 | 61 | func (cc *CrossCorrelation) sanityCheck() error { 62 | if cc.current.Size() < 2 || cc.target.Size() < 2 { 63 | return errors.New("not enough data points") 64 | } 65 | return nil 66 | } 67 | 68 | func (cc *CrossCorrelation) detectCorrelation() CorrelationResult { 69 | cc.current, cc.target = cc.current.Normalize(), cc.target.Normalize() 70 | cc.current.Align(cc.target) 71 | 72 | correlations := make([][]float64, 0) 73 | shiftedCorrelations := make([]float64, 0) 74 | 75 | currentValues, targetValues := cc.current.Values, cc.target.Values 76 | currentAvg, targetAvg := cc.current.Average(), cc.target.Average() 77 | currentStdev, targetStdev := cc.current.Stdev(), cc.target.Stdev() 78 | 79 | n := cc.current.Size() 80 | denom := currentStdev * targetStdev * float64(n) 81 | allowedShiftStep := findMaxAllowedShift(cc.current.Timestamps, cc.maxShift) 82 | 83 | var shiftLowerBound, shiftUpperBound int 84 | if allowedShiftStep != -1 { 85 | shiftLowerBound = -allowedShiftStep 86 | shiftUpperBound = allowedShiftStep 87 | } else { 88 | shiftLowerBound = 0 89 | shiftUpperBound = 1 90 | } 91 | 92 | for delay := shiftLowerBound; delay < shiftUpperBound; delay++ { 93 | _delay := math.Abs(cc.current.Timestamps[AbsInt(delay)] - cc.current.Timestamps[0]) 94 | sum := 0.0 95 | for i := 0; i < n; i++ { 96 | j := i + delay 97 | if j < 0 || j >= n { 98 | continue 99 | } else { 100 | sum += (currentValues[i] - currentAvg) * (targetValues[j] - targetAvg) 101 | } 102 | } 103 | 104 | // Calculate correlation coefficient 105 | r := sum 106 | if denom != 0 { 107 | r = sum / denom 108 | } 109 | correlations = append(correlations, []float64{_delay, r}) 110 | 111 | // Take into account the maximal shift 112 | if cc.maxShift > 0 { 113 | r *= 1 + _delay/float64(cc.maxShift)*cc.impact 114 | } 115 | shiftedCorrelations = append(shiftedCorrelations, r) 116 | } 117 | 118 | maxCorrelation := findMaxCorrelation(correlations) 119 | _, maxShiftedCorrelation := minMax(shiftedCorrelations) 120 | return CorrelationResult{ 121 | Shift: maxCorrelation[0], 122 | Coefficient: maxCorrelation[1], 123 | ShiftedCoefficient: maxShiftedCorrelation, 124 | } 125 | } 126 | 127 | func findMaxAllowedShift(timestamps []float64, target float64) int { 128 | initialTimestamp := timestamps[0] 129 | residualTimestamps := mapSlice(timestamps, func(ts float64) float64 { 130 | return ts - initialTimestamp 131 | }) 132 | // Find the first element in timestamps whose value is bigger than target. 133 | pos := -1 134 | lowerBound, upperBound := 0, len(residualTimestamps) 135 | for lowerBound < upperBound { 136 | pos = int(lowerBound + (upperBound-lowerBound)/2) 137 | if timestamps[pos] > target { 138 | upperBound = pos 139 | } else { 140 | lowerBound = pos + 1 141 | } 142 | } 143 | return pos 144 | } 145 | 146 | func findMaxCorrelation(data [][]float64) []float64 { 147 | max := data[0] 148 | for _, slice := range data { 149 | if slice[1] > max[1] { 150 | max = slice 151 | } 152 | } 153 | return max 154 | } 155 | -------------------------------------------------------------------------------- /bitmap.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "errors" 5 | "math" 6 | "strconv" 7 | "strings" 8 | ) 9 | 10 | const minimalPointsInWindows = 50 11 | 12 | // Bitmap holds bitmap algorithm configuration. 13 | // 14 | // The Bitmap algorithm breaks the time series into chunks and uses 15 | // the frequency of similar chunks to determine anomalies scores. 16 | // The scoring happens by sliding both lagging and future windows. 17 | type Bitmap struct { 18 | chunkSize int 19 | precision int 20 | lagWindowSize int 21 | futureWindowSize int 22 | } 23 | 24 | // NewBitmap returns Bitmap instance. 25 | func NewBitmap() *Bitmap { 26 | return &Bitmap{ 27 | chunkSize: 2, 28 | precision: 4, 29 | lagWindowSize: 0, 30 | futureWindowSize: 0, 31 | } 32 | } 33 | 34 | // ChunkSize sets the chunk size to use (defaults to 2). 35 | func (b *Bitmap) ChunkSize(size int) *Bitmap { 36 | b.chunkSize = size 37 | return b 38 | } 39 | 40 | // Precision sets the precision. 41 | func (b *Bitmap) Precision(p int) *Bitmap { 42 | b.precision = p 43 | return b 44 | } 45 | 46 | // LagWindowSize sets the lag window size (defaults to 0). 47 | func (b *Bitmap) LagWindowSize(size int) *Bitmap { 48 | b.lagWindowSize = size 49 | return b 50 | } 51 | 52 | // FutureWindowSize sets the future window size (default to 0). 53 | func (b *Bitmap) FutureWindowSize(size int) *Bitmap { 54 | b.futureWindowSize = size 55 | return b 56 | } 57 | 58 | // Run runs the bitmap algorithm over the time series 59 | func (b *Bitmap) Run(timeSeries *TimeSeries) *ScoreList { 60 | scoreList, _ := b.computeScores(timeSeries) 61 | return scoreList 62 | } 63 | 64 | func (b *Bitmap) computeScores(timeSeries *TimeSeries) (*ScoreList, error) { 65 | // Update both lagging and future windows size 66 | b.lagWindowSize = int(0.0125 * float64(len(timeSeries.Timestamps))) 67 | b.futureWindowSize = int(0.0125 * float64(len(timeSeries.Timestamps))) 68 | 69 | // Perform sanity check 70 | if _, err := b.sanityCheck(timeSeries); err != nil { 71 | return nil, err 72 | } 73 | 74 | sax := b.generateSAX(timeSeries) 75 | laggingsMaps, futureMaps := b.constructAllSAXChunks(timeSeries, sax) 76 | dimension := timeSeries.Size() 77 | 78 | computeScoreBetweenTwoWindows := func(idx int) float64 { 79 | lagWindowChunk := laggingsMaps[idx] 80 | futureWindowChunk := futureMaps[idx] 81 | score := 0.0 82 | 83 | // Iterate over lagging window chunks 84 | for chunk := range lagWindowChunk { 85 | if _, ok := futureWindowChunk[chunk]; ok { 86 | score += math.Pow(float64(futureWindowChunk[chunk]-lagWindowChunk[chunk]), 2.0) 87 | } else { 88 | score += math.Pow(float64(lagWindowChunk[chunk]), 2.0) 89 | } 90 | } 91 | 92 | // Iterate over future window chunks 93 | for chunk := range futureWindowChunk { 94 | if _, ok := lagWindowChunk[chunk]; !ok { 95 | score += math.Pow(float64(futureWindowChunk[chunk]), 2) 96 | } 97 | } 98 | return score 99 | } 100 | 101 | scores := mapSliceWithIndex(timeSeries.Timestamps, func(idx int, timestamp float64) float64 { 102 | if (idx < b.lagWindowSize) || (idx > (dimension - b.futureWindowSize)) { 103 | return 0.0 104 | } 105 | return computeScoreBetweenTwoWindows(idx) 106 | }) 107 | 108 | scoreList := &ScoreList{timeSeries.Timestamps, scores} 109 | return scoreList, nil 110 | } 111 | 112 | // generateSAX generates the SAX representation of the time series values 113 | func (b *Bitmap) generateSAX(timeSeries *TimeSeries) BitmapBinary { 114 | sections := make(map[int]float64) 115 | min, max := minMax(timeSeries.Values) 116 | 117 | // Break the whole value range into different sections 118 | sectionHeight := (max - min) / float64(b.precision) 119 | for i := 0; i < b.precision; i++ { 120 | sections[i] = min + float64(i)*sectionHeight 121 | } 122 | 123 | // Generate SAX representation 124 | sectionsNumbers := mapIntKeys(sections) 125 | generateSingleSAX := func(value float64) string { 126 | sax := 0 127 | for _, sectionNumber := range sectionsNumbers { 128 | if value >= sections[sectionNumber] { 129 | sax = sectionNumber 130 | } else { 131 | break 132 | } 133 | } 134 | return strconv.Itoa(sax) 135 | } 136 | 137 | var saxBuilder strings.Builder 138 | for _, value := range timeSeries.Values { 139 | singleSAX := generateSingleSAX(value) 140 | saxBuilder.WriteString(singleSAX) 141 | } 142 | return BitmapBinary(saxBuilder.String()) 143 | } 144 | 145 | func (b *Bitmap) constructChunkFrequencyMap(sax BitmapBinary) map[BitmapBinary]int { 146 | frequencyMap := make(map[BitmapBinary]int) 147 | saxLength := sax.Len() 148 | for i := 0; i < saxLength; i++ { 149 | if i+b.chunkSize <= saxLength { 150 | chunk := sax.Slice(i, i+b.chunkSize) 151 | frequencyMap[chunk]++ 152 | } 153 | } 154 | return frequencyMap 155 | } 156 | 157 | func (b *Bitmap) constructAllSAXChunks(timeSeries *TimeSeries, sax BitmapBinary) (map[int]map[BitmapBinary]int, map[int]map[BitmapBinary]int) { 158 | laggingsMaps := make(map[int]map[BitmapBinary]int) 159 | futureMaps := make(map[int]map[BitmapBinary]int) 160 | lws := b.lagWindowSize 161 | fws := b.futureWindowSize 162 | chunkSize := b.chunkSize 163 | dimension := timeSeries.Size() 164 | 165 | var lwLeaveChunk, lwEnterChunk, fwLeaveChunk, fwEnterChunk BitmapBinary 166 | 167 | for i := 0; i < dimension; i++ { 168 | if (i < lws) || (i > dimension-fws) { 169 | laggingsMaps[i] = nil 170 | } else { 171 | if laggingsMaps[i-1] == nil { 172 | laggingsMaps[i] = b.constructChunkFrequencyMap(sax[i-lws : i]) 173 | lwLeaveChunk = sax.Slice(0, chunkSize) 174 | lwEnterChunk = sax.Slice(i-chunkSize+1, i+1) 175 | 176 | futureMaps[i] = b.constructChunkFrequencyMap(sax[i : i+fws]) 177 | fwLeaveChunk = sax.Slice(i, i+chunkSize) 178 | fwEnterChunk = sax.Slice(i+fws+1-chunkSize, i+fws+1) 179 | } else { 180 | lagMap := laggingsMaps[i-1] 181 | lagMap[lwLeaveChunk]-- 182 | lagMap[lwEnterChunk]++ 183 | laggingsMaps[i] = lagMap 184 | 185 | futureMap := futureMaps[i-1] 186 | futureMap[fwLeaveChunk]-- 187 | futureMap[fwEnterChunk]++ 188 | futureMaps[i] = futureMap 189 | 190 | // Update leave and enter chunks 191 | lwLeaveChunk = sax.Slice(i-lws, i-lws+chunkSize) 192 | lwEnterChunk = sax.Slice(i-chunkSize+1, i+1) 193 | fwLeaveChunk = sax.Slice(i, i+chunkSize) 194 | fwEnterChunk = sax.Slice(i+fws+1-chunkSize, i+fws+1) 195 | } 196 | } 197 | } 198 | return laggingsMaps, futureMaps 199 | } 200 | 201 | func (b *Bitmap) sanityCheck(timeSeries *TimeSeries) (*TimeSeries, error) { 202 | windowsDimension := b.lagWindowSize + b.futureWindowSize 203 | if (timeSeries.Size() < windowsDimension) || (windowsDimension < minimalPointsInWindows) { 204 | return nil, errors.New("not enough data points") 205 | } 206 | return timeSeries, nil 207 | } 208 | -------------------------------------------------------------------------------- /time_series.go: -------------------------------------------------------------------------------- 1 | package anomalia 2 | 3 | import ( 4 | "encoding/csv" 5 | "encoding/json" 6 | "os" 7 | "strconv" 8 | ) 9 | 10 | // TimeSeries wrapper for timestamps and their values 11 | type TimeSeries struct { 12 | Timestamps []float64 13 | Values []float64 14 | } 15 | 16 | // NewTimeSeries creates a new time series data structure 17 | func NewTimeSeries(timestamps []float64, values []float64) *TimeSeries { 18 | if len(timestamps) != len(values) { 19 | panic("timestamps and values must have the same size") 20 | } 21 | return &TimeSeries{ 22 | Timestamps: timestamps, 23 | Values: values, 24 | } 25 | } 26 | 27 | // NewTimeSeriesFromCSV create a new time series from a CSV file. 28 | func NewTimeSeriesFromCSV(path string) *TimeSeries { 29 | var ( 30 | timestamps []float64 31 | data []float64 32 | ) 33 | 34 | f, _ := os.Open(path) 35 | defer func() { 36 | if err := f.Close(); err != nil { 37 | panic(err) 38 | } 39 | }() 40 | 41 | r := csv.NewReader(f) 42 | 43 | _, _ = r.Read() // Read the header 44 | for rec, err := r.Read(); err == nil; rec, err = r.Read() { 45 | // Ignore errors because we assume the file to be correct 46 | if timestamp, err := strconv.ParseFloat(rec[0], 64); err == nil { 47 | timestamps = append(timestamps, timestamp) 48 | } 49 | if val, err := strconv.ParseFloat(rec[1], 64); err == nil { 50 | data = append(data, val) 51 | } 52 | } 53 | return NewTimeSeries(timestamps, data) 54 | } 55 | 56 | // EarliestTimestamp returns the earliest timestamp in the time series 57 | func (ts *TimeSeries) EarliestTimestamp() float64 { 58 | min, _ := minMax(ts.Timestamps) 59 | return min 60 | } 61 | 62 | // LastestTimestamp returns the latest timestamp in the time series 63 | func (ts *TimeSeries) LastestTimestamp() float64 { 64 | _, max := minMax(ts.Timestamps) 65 | return max 66 | } 67 | 68 | // Zip convert the time series to a map (map[Timestamp]Value) 69 | func (ts *TimeSeries) Zip() map[float64]float64 { 70 | m := make(map[float64]float64) 71 | sorted := sortedCopy(ts.Timestamps) 72 | 73 | for idx, timestamp := range sorted { 74 | m[timestamp] = ts.Values[idx] 75 | } 76 | return m 77 | } 78 | 79 | // AddOffset increments time series timestamps by some offset 80 | func (ts *TimeSeries) AddOffset(offset float64) *TimeSeries { 81 | offsettedTimestamps := mapSlice(ts.Timestamps, func(timestamp float64) float64 { return timestamp + offset }) 82 | return NewTimeSeries(offsettedTimestamps, ts.Values) 83 | } 84 | 85 | // Normalize normalizes the time series values by dividing by the maximum value 86 | func (ts *TimeSeries) Normalize() *TimeSeries { 87 | _, max := minMax(ts.Values) 88 | normalizedValues := mapSlice(ts.Values, func(value float64) float64 { return value / max }) 89 | return NewTimeSeries(ts.Timestamps, normalizedValues) 90 | } 91 | 92 | // NormalizeWithMinMax normalizes time series values using MixMax 93 | func (ts *TimeSeries) NormalizeWithMinMax() *TimeSeries { 94 | normalizedValues := ts.Values 95 | if min, max := minMax(ts.Values); min != max { 96 | normalizedValues = mapSlice(ts.Values, func(value float64) float64 { return value - min/max - min }) 97 | } 98 | return NewTimeSeries(ts.Timestamps, normalizedValues) 99 | } 100 | 101 | // Crop crops the time series timestamps into the specified range [start, end] 102 | func (ts *TimeSeries) Crop(start, end float64) *TimeSeries { 103 | zippedSeries := ts.Zip() 104 | // Filter timestamps within the crop range 105 | timestamps := filter(ts.Timestamps, func(timestamp float64) bool { 106 | return (timestamp >= start) && (timestamp <= end) 107 | }) 108 | 109 | // Get values of cropped timestamps 110 | values := make([]float64, 0, len(timestamps)) 111 | for _, timestamp := range timestamps { 112 | values = append(values, zippedSeries[timestamp]) 113 | } 114 | return NewTimeSeries(timestamps, values) 115 | } 116 | 117 | // Average calculates average value over the time series 118 | func (ts *TimeSeries) Average() float64 { 119 | return Average(ts.Values) 120 | } 121 | 122 | // Stdev calculates the standard deviation of the time series 123 | func (ts *TimeSeries) Stdev() float64 { 124 | return Stdev(ts.Values) 125 | } 126 | 127 | // Median calculates median value over the time series. 128 | func (ts *TimeSeries) Median() float64 { 129 | sorted := sortedCopy(ts.Values) 130 | length := len(sorted) 131 | mid := length / 2 132 | 133 | if length%2 == 0 { 134 | return (sorted[mid-1] + sorted[mid]) / 2 135 | } 136 | return sorted[mid] 137 | } 138 | 139 | // Align aligns two time series so that they have the same dimension and same timestamps 140 | func (ts *TimeSeries) Align(other *TimeSeries) { 141 | var ( 142 | it = NewIterator(ts.Timestamps) 143 | otherIt = NewIterator(other.Timestamps) 144 | zippedSeries = ts.Zip() 145 | zippedOtherSeries = other.Zip() 146 | aligned = make(map[float64]float64) 147 | otherAligned = make(map[float64]float64) 148 | ) 149 | 150 | timestamp, otherTimestamp := it.Next(), otherIt.Next() 151 | for timestamp != nil && otherTimestamp != nil { 152 | _timestamp, _otherTimestamp := *timestamp, *otherTimestamp 153 | _value, _otherValue := zippedSeries[_timestamp], zippedOtherSeries[_otherTimestamp] 154 | if _timestamp == _otherTimestamp { 155 | aligned[_timestamp] = _value 156 | otherAligned[_otherTimestamp] = _otherValue 157 | timestamp = it.Next() 158 | otherTimestamp = otherIt.Next() 159 | } else if _timestamp < _otherTimestamp { 160 | aligned[_timestamp] = _value 161 | otherAligned[_timestamp] = _otherValue 162 | timestamp = it.Next() 163 | } else { 164 | aligned[_otherTimestamp] = _value 165 | otherAligned[_otherTimestamp] = _otherValue 166 | otherTimestamp = otherIt.Next() 167 | } 168 | } 169 | 170 | // 171 | // Align remainder of timestamps 172 | // 173 | for timestamp != nil { 174 | _timestamp := *timestamp 175 | aligned[_timestamp] = zippedSeries[_timestamp] 176 | otherAligned[_timestamp] = other.Values[len(other.Values)-1] 177 | timestamp = it.Next() 178 | } 179 | 180 | for otherTimestamp != nil { 181 | _otherTimestamp := *otherTimestamp 182 | aligned[_otherTimestamp] = ts.Values[len(ts.Values)-1] 183 | otherAligned[_otherTimestamp] = zippedOtherSeries[_otherTimestamp] 184 | otherTimestamp = otherIt.Next() 185 | } 186 | 187 | // Adapt both the original and other time series 188 | alignedTimestamps, alignedValues := unpackMap(aligned) 189 | ts.Timestamps = alignedTimestamps 190 | ts.Values = alignedValues 191 | 192 | otherTimestamps, otherValues := unpackMap(otherAligned) 193 | other.Timestamps = otherTimestamps 194 | other.Values = otherValues 195 | } 196 | 197 | // Size returns the time series dimension/size. 198 | func (ts *TimeSeries) Size() int { 199 | return len(ts.Timestamps) 200 | } 201 | 202 | // String returns JSON representation of the time series 203 | func (ts *TimeSeries) String() string { 204 | out, err := json.Marshal(ts) 205 | if err != nil { 206 | panic(err) 207 | } 208 | return string(out) 209 | } 210 | -------------------------------------------------------------------------------- /testdata/co2.csv: -------------------------------------------------------------------------------- 1 | Date,CO2Levels 2 | 0,315.71 3 | 1,317.45 4 | 2,317.5 5 | 3,317.1 6 | 4,315.86 7 | 5,314.93 8 | 6,313.2 9 | 7,312.66 10 | 8,313.33 11 | 9,314.67 12 | 10,315.62 13 | 11,316.38 14 | 12,316.71 15 | 13,317.72 16 | 14,318.29 17 | 15,318.15 18 | 16,316.54 19 | 17,314.8 20 | 18,313.84 21 | 19,313.26 22 | 20,314.8 23 | 21,315.58 24 | 22,316.43 25 | 23,316.97 26 | 24,317.58 27 | 25,319.02 28 | 26,320.03 29 | 27,319.59 30 | 28,318.18 31 | 29,315.91 32 | 30,314.16 33 | 31,313.83 34 | 32,315 35 | 33,316.19 36 | 34,316.93 37 | 35,317.7 38 | 36,318.54 39 | 37,319.48 40 | 38,320.58 41 | 39,319.77 42 | 40,318.57 43 | 41,316.79 44 | 42,314.8 45 | 43,315.38 46 | 44,316.1 47 | 45,317.01 48 | 46,317.94 49 | 47,318.56 50 | 48,319.68 51 | 49,320.63 52 | 50,321.01 53 | 51,320.55 54 | 52,319.58 55 | 53,317.4 56 | 54,316.26 57 | 55,315.42 58 | 56,316.69 59 | 57,317.69 60 | 58,318.74 61 | 59,319.08 62 | 60,319.86 63 | 61,321.39 64 | 62,322.25 65 | 63,321.47 66 | 64,319.74 67 | 65,317.77 68 | 66,316.21 69 | 67,315.99 70 | 68,317.12 71 | 69,318.31 72 | 70,319.57 73 | 71,320.07 74 | 72,320.73 75 | 73,321.77 76 | 74,322.25 77 | 75,321.89 78 | 76,320.44 79 | 77,318.7 80 | 78,316.7 81 | 79,316.79 82 | 80,317.79 83 | 81,318.71 84 | 82,319.44 85 | 83,320.44 86 | 84,320.89 87 | 85,322.13 88 | 86,322.16 89 | 87,321.87 90 | 88,321.39 91 | 89,318.81 92 | 90,317.81 93 | 91,317.3 94 | 92,318.87 95 | 93,319.42 96 | 94,320.62 97 | 95,321.59 98 | 96,322.39 99 | 97,323.87 100 | 98,324.01 101 | 99,323.75 102 | 100,322.39 103 | 101,320.37 104 | 102,318.64 105 | 103,318.1 106 | 104,319.79 107 | 105,321.08 108 | 106,322.07 109 | 107,322.5 110 | 108,323.04 111 | 109,324.42 112 | 110,325 113 | 111,324.09 114 | 112,322.55 115 | 113,320.92 116 | 114,319.31 117 | 115,319.31 118 | 116,320.72 119 | 117,321.96 120 | 118,322.57 121 | 119,323.15 122 | 120,323.89 123 | 121,325.02 124 | 122,325.57 125 | 123,325.36 126 | 124,324.14 127 | 125,322.03 128 | 126,320.41 129 | 127,320.25 130 | 128,321.31 131 | 129,322.84 132 | 130,324 133 | 131,324.42 134 | 132,325.64 135 | 133,326.66 136 | 134,327.34 137 | 135,326.76 138 | 136,325.88 139 | 137,323.67 140 | 138,322.38 141 | 139,321.78 142 | 140,322.85 143 | 141,324.11 144 | 142,325.03 145 | 143,325.99 146 | 144,326.87 147 | 145,328.13 148 | 146,328.07 149 | 147,327.66 150 | 148,326.35 151 | 149,324.69 152 | 150,323.1 153 | 151,323.16 154 | 152,323.98 155 | 153,325.13 156 | 154,326.17 157 | 155,326.68 158 | 156,327.18 159 | 157,327.78 160 | 158,328.92 161 | 159,328.57 162 | 160,327.34 163 | 161,325.46 164 | 162,323.36 165 | 163,323.57 166 | 164,324.8 167 | 165,326.01 168 | 166,326.77 169 | 167,327.63 170 | 168,327.75 171 | 169,329.72 172 | 170,330.07 173 | 171,329.09 174 | 172,328.05 175 | 173,326.32 176 | 174,324.93 177 | 175,325.06 178 | 176,326.5 179 | 177,327.55 180 | 178,328.54 181 | 179,329.56 182 | 180,330.3 183 | 181,331.5 184 | 182,332.48 185 | 183,332.07 186 | 184,330.87 187 | 185,329.31 188 | 186,327.51 189 | 187,327.18 190 | 188,328.16 191 | 189,328.64 192 | 190,329.35 193 | 191,330.71 194 | 192,331.48 195 | 193,332.65 196 | 194,333.2 197 | 195,332.16 198 | 196,331.07 199 | 197,329.12 200 | 198,327.32 201 | 199,327.28 202 | 200,328.3 203 | 201,329.58 204 | 202,330.73 205 | 203,331.46 206 | 204,331.9 207 | 205,333.17 208 | 206,333.94 209 | 207,333.45 210 | 208,331.98 211 | 209,329.95 212 | 210,328.5 213 | 211,328.35 214 | 212,329.37 215 | 213,330.58 216 | 214,331.59 217 | 215,332.75 218 | 216,333.52 219 | 217,334.64 220 | 218,334.77 221 | 219,334 222 | 220,333.06 223 | 221,330.68 224 | 222,328.95 225 | 223,328.75 226 | 224,330.15 227 | 225,331.62 228 | 226,332.66 229 | 227,333.13 230 | 228,334.95 231 | 229,336.13 232 | 230,336.93 233 | 231,336.16 234 | 232,334.88 235 | 233,332.56 236 | 234,331.29 237 | 235,331.27 238 | 236,332.41 239 | 237,333.6 240 | 238,334.95 241 | 239,335.25 242 | 240,336.66 243 | 241,337.69 244 | 242,338.03 245 | 243,338.01 246 | 244,336.41 247 | 245,334.41 248 | 246,332.37 249 | 247,332.41 250 | 248,333.75 251 | 249,334.9 252 | 250,336.14 253 | 251,336.69 254 | 252,338.27 255 | 253,338.96 256 | 254,339.21 257 | 255,339.26 258 | 256,337.54 259 | 257,335.75 260 | 258,333.98 261 | 259,334.19 262 | 260,335.31 263 | 261,336.81 264 | 262,337.9 265 | 263,338.34 266 | 264,340.01 267 | 265,340.93 268 | 266,341.48 269 | 267,341.33 270 | 268,339.4 271 | 269,337.7 272 | 270,336.19 273 | 271,336.15 274 | 272,337.27 275 | 273,338.32 276 | 274,339.29 277 | 275,340.55 278 | 276,341.61 279 | 277,342.53 280 | 278,343.03 281 | 279,342.54 282 | 280,340.78 283 | 281,338.44 284 | 282,336.95 285 | 283,337.08 286 | 284,338.58 287 | 285,339.88 288 | 286,340.96 289 | 287,341.73 290 | 288,342.81 291 | 289,343.97 292 | 290,344.63 293 | 291,343.79 294 | 292,342.32 295 | 293,340.09 296 | 294,338.28 297 | 295,338.29 298 | 296,339.6 299 | 297,340.9 300 | 298,341.68 301 | 299,342.9 302 | 300,343.33 303 | 301,345.25 304 | 302,346.03 305 | 303,345.63 306 | 304,344.19 307 | 305,342.27 308 | 306,340.35 309 | 307,340.38 310 | 308,341.59 311 | 309,343.05 312 | 310,344.1 313 | 311,344.79 314 | 312,345.52 315 | 313,346.84 316 | 314,347.63 317 | 315,346.97 318 | 316,345.53 319 | 317,343.55 320 | 318,341.4 321 | 319,341.67 322 | 320,343.1 323 | 321,344.7 324 | 322,345.21 325 | 323,346.16 326 | 324,347.74 327 | 325,348.33 328 | 326,349.06 329 | 327,348.38 330 | 328,346.72 331 | 329,345.02 332 | 330,343.27 333 | 331,343.13 334 | 332,344.49 335 | 333,345.88 336 | 334,346.56 337 | 335,347.28 338 | 336,348.01 339 | 337,349.77 340 | 338,350.38 341 | 339,349.93 342 | 340,348.16 343 | 341,346.08 344 | 342,345.22 345 | 343,344.51 346 | 344,345.93 347 | 345,347.22 348 | 346,348.52 349 | 347,348.73 350 | 348,349.73 351 | 349,351.31 352 | 350,352.09 353 | 351,351.53 354 | 352,350.11 355 | 353,348.08 356 | 354,346.52 357 | 355,346.59 358 | 356,347.96 359 | 357,349.16 360 | 358,350.39 361 | 359,351.64 362 | 360,352.41 363 | 361,353.69 364 | 362,354.21 365 | 363,353.72 366 | 364,352.69 367 | 365,350.4 368 | 366,348.92 369 | 367,349.13 370 | 368,350.2 371 | 369,351.41 372 | 370,352.91 373 | 371,353.27 374 | 372,353.96 375 | 373,355.64 376 | 374,355.86 377 | 375,355.37 378 | 376,353.99 379 | 377,351.81 380 | 378,350.05 381 | 379,350.25 382 | 380,351.49 383 | 381,352.85 384 | 382,353.8 385 | 383,355.04 386 | 384,355.73 387 | 385,356.32 388 | 386,357.32 389 | 387,356.34 390 | 388,354.84 391 | 389,353.01 392 | 390,351.31 393 | 391,351.62 394 | 392,353.07 395 | 393,354.33 396 | 394,354.84 397 | 395,355.73 398 | 396,357.23 399 | 397,358.66 400 | 398,359.13 401 | 399,358.13 402 | 400,356.19 403 | 401,353.85 404 | 402,352.25 405 | 403,352.35 406 | 404,353.81 407 | 405,355.12 408 | 406,356.25 409 | 407,357.11 410 | 408,357.86 411 | 409,359.09 412 | 410,359.59 413 | 411,359.33 414 | 412,357.01 415 | 413,354.94 416 | 414,352.95 417 | 415,353.32 418 | 416,354.32 419 | 417,355.57 420 | 418,357 421 | 419,357.31 422 | 420,358.47 423 | 421,359.27 424 | 422,360.19 425 | 423,359.52 426 | 424,357.33 427 | 425,355.64 428 | 426,354.03 429 | 427,354.12 430 | 428,355.41 431 | 429,356.91 432 | 430,358.24 433 | 431,358.92 434 | 432,359.99 435 | 433,361.23 436 | 434,361.65 437 | 435,360.81 438 | 436,359.38 439 | 437,357.46 440 | 438,355.73 441 | 439,356.07 442 | 440,357.53 443 | 441,358.98 444 | 442,359.92 445 | 443,360.86 446 | 444,361.83 447 | 445,363.3 448 | 446,363.69 449 | 447,363.19 450 | 448,361.64 451 | 449,359.12 452 | 450,358.17 453 | 451,357.99 454 | 452,359.45 455 | 453,360.68 456 | 454,362.07 457 | 455,363.24 458 | 456,364.17 459 | 457,364.57 460 | 458,365.13 461 | 459,364.92 462 | 460,363.55 463 | 461,361.38 464 | 462,359.54 465 | 463,359.58 466 | 464,360.89 467 | 465,362.24 468 | 466,363.09 469 | 467,364.03 470 | 468,364.51 471 | 469,366.35 472 | 470,366.64 473 | 471,365.59 474 | 472,364.31 475 | 473,362.25 476 | 474,360.29 477 | 475,360.82 478 | 476,362.49 479 | 477,364.38 480 | 478,365.26 481 | 479,365.98 482 | 480,367.24 483 | 481,368.66 484 | 482,369.42 485 | 483,368.99 486 | 484,367.82 487 | 485,365.95 488 | 486,364.02 489 | 487,364.4 490 | 488,365.52 491 | 489,367.13 492 | 490,368.18 493 | 491,369.07 494 | 492,369.68 495 | 493,370.99 496 | 494,370.96 497 | 495,370.3 498 | 496,369.45 499 | 497,366.9 500 | 498,364.81 501 | 499,365.37 502 | 500,366.72 503 | 501,368.1 504 | 502,369.29 505 | 503,369.54 506 | 504,370.6 507 | 505,371.81 508 | 506,371.58 509 | 507,371.7 510 | 508,369.86 511 | 509,368.13 512 | 510,367 513 | 511,367.03 514 | 512,368.37 515 | 513,369.67 516 | 514,370.59 517 | 515,371.51 518 | 516,372.43 519 | 517,373.37 520 | 518,373.85 521 | 519,373.21 522 | 520,371.51 523 | 521,369.61 524 | 522,368.18 525 | 523,368.45 526 | 524,369.76 527 | 525,371.24 528 | 526,372.53 529 | 527,373.2 530 | 528,374.12 531 | 529,375.02 532 | 530,375.76 533 | 531,375.52 534 | 532,374.01 535 | 533,371.85 536 | 534,370.75 537 | 535,370.55 538 | 536,372.25 539 | 537,373.79 540 | 538,374.88 541 | 539,375.64 542 | 540,376.45 543 | 541,377.73 544 | 542,378.6 545 | 543,378.28 546 | 544,376.7 547 | 545,374.38 548 | 546,373.17 549 | 547,373.14 550 | 548,374.66 551 | 549,375.99 552 | 550,377 553 | 551,377.87 554 | 552,378.88 555 | 553,380.35 556 | 554,380.62 557 | 555,379.69 558 | 556,377.47 559 | 557,376.01 560 | 558,374.25 561 | 559,374.46 562 | 560,376.16 563 | 561,377.51 564 | 562,378.46 565 | 563,379.73 566 | 564,380.77 567 | 565,382.29 568 | 566,382.45 569 | 567,382.22 570 | 568,380.74 571 | 569,378.74 572 | 570,376.7 573 | 571,377 574 | 572,378.35 575 | 573,380.11 576 | 574,381.38 577 | 575,382.19 578 | 576,382.67 579 | 577,384.61 580 | 578,385.03 581 | 579,384.05 582 | 580,382.46 583 | 581,380.41 584 | 582,378.85 585 | 583,379.13 586 | 584,380.15 587 | 585,381.82 588 | 586,382.89 589 | 587,383.9 590 | 588,384.58 591 | 589,386.5 592 | 590,386.56 593 | 591,386.1 594 | 592,384.5 595 | 593,381.99 596 | 594,380.96 597 | 595,381.12 598 | 596,382.45 599 | 597,383.95 600 | 598,385.52 601 | 599,385.82 602 | 600,386.03 603 | 601,387.21 604 | 602,388.54 605 | 603,387.76 606 | 604,386.36 607 | 605,384.09 608 | 606,383.18 609 | 607,382.99 610 | 608,384.19 611 | 609,385.56 612 | 610,386.94 613 | 611,387.48 614 | 612,388.82 615 | 613,389.55 616 | 614,390.14 617 | 615,389.48 618 | 616,388.03 619 | 617,386.11 620 | 618,384.74 621 | 619,384.43 622 | 620,386.02 623 | 621,387.42 624 | 622,388.71 625 | 623,390.2 626 | 624,391.17 627 | 625,392.46 628 | 626,393 629 | 627,392.15 630 | 628,390.2 631 | 629,388.35 632 | 630,386.85 633 | 631,387.24 634 | 632,388.67 635 | 633,389.79 636 | 634,391.33 637 | 635,391.86 638 | 636,392.6 639 | 637,393.25 640 | 638,394.19 641 | 639,393.73 642 | 640,392.51 643 | 641,390.13 644 | 642,389.08 645 | 643,389 646 | 644,390.28 647 | 645,391.86 648 | 646,393.12 649 | 647,393.86 650 | 648,394.4 651 | 649,396.18 652 | 650,396.74 653 | 651,395.71 654 | 652,394.36 655 | 653,392.39 656 | 654,391.11 657 | 655,391.05 658 | 656,392.98 659 | 657,394.34 660 | 658,395.55 661 | 659,396.8 662 | 660,397.43 663 | 661,398.41 664 | 662,399.78 665 | 663,398.61 666 | 664,397.32 667 | 665,395.2 668 | 666,393.45 669 | 667,393.7 670 | 668,395.16 671 | 669,396.84 672 | 670,397.85 673 | 671,398.01 674 | 672,399.77 675 | 673,401.38 676 | 674,401.78 677 | 675,401.25 678 | 676,399.1 679 | 677,397.03 680 | 678,395.38 681 | 679,396.03 682 | 680,397.28 683 | 681,398.91 684 | 682,399.98 685 | 683,400.28 686 | 684,401.54 687 | 685,403.28 688 | 686,403.96 689 | 687,402.8 690 | 688,401.31 691 | 689,398.93 692 | 690,397.63 693 | 691,398.29 694 | 692,400.16 695 | 693,401.85 696 | 694,402.52 697 | 695,404.04 698 | 696,404.83 699 | 697,407.42 700 | 698,407.7 701 | 699,406.81 702 | 700,404.39 703 | 701,402.25 704 | 702,401.03 705 | 703,401.57 706 | 704,403.53 707 | 705,404.48 708 | 706,406.13 709 | 707,406.42 -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | --------------------------------------------------------------------------------