├── .DS_Store
├── .github
│   ├── ISSUE_TEMPLATE.md
│   └── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE.md
├── README.md
├── anomaly-detector-pypi-demo.ipynb
├── ipython-notebook
│   ├── .DS_Store
│   ├── .gitignore
│   ├── API Sample
│   │   ├── Batch anomaly detection with the Anomaly Detector API.ipynb
│   │   ├── Latest point detection with the Anomaly Detector API.ipynb
│   │   ├── Multivariate API Demo Notebook.ipynb
│   │   └── 🆕MVAD API Demo (GA version).ipynb
│   ├── LICENSE.md
│   ├── README.md
│   ├── SDK Sample
│   │   ├── Public Preview
│   │   │   ├── MVAD-SDK-Demo.ipynb
│   │   │   └── UVAD-SDK-Demo.ipynb
│   │   ├── 🆕MVAD-SDK-Demo.ipynb
│   │   └── 🆕UVAD-SDK-Demo.ipynb
│   └── media
│       ├── How to generate a SAS.mp4
│       ├── anomaly_detection1.png
│       ├── anomaly_detection2.png
│       ├── data-overview.png
│       ├── endpoint_key.png
│       └── inference-result.png
├── media
│   └── cognitive-services-get-access-keys.png
├── postman-demo
│   └── README.md
├── sampledata
│   ├── README.md
│   ├── multivariate
│   │   ├── 5_3000.json
│   │   ├── multivariate_sample-engine-simple.zip
│   │   ├── multivariate_sample.json
│   │   ├── multivariate_sample_data.csv
│   │   ├── multivariate_sample_data_5_3000.zip
│   │   └── sample_data_5_3000.csv
│   └── univariate
│       ├── batch-response.json
│       ├── change-point-sample-seasonality.json
│       ├── change-point-sample.json
│       ├── latest-point-response.json
│       ├── request-data.csv
│       ├── request-data.json
│       ├── univariate_sample_daily.json
│       └── univariate_sample_hourly.json
├── samples-multivariate
│   ├── MultivariateSample.java
│   ├── README.md
│   ├── Sample_multivaraiate_detect.cs
│   ├── sample_multivariate_detect.py
│   └── sample_multivariate_detection.js
├── samples-univariate
│   ├── csharp-detect-anomalies.cs
│   ├── java-detect-anomalies.java
│   ├── python-detect-anomalies.py
│   └── sdk
│       ├── csharp-sdk-sample.cs
│       ├── go-sdk-sample.go
│       ├── node
│       │   ├── .gitignore
│       │   ├── README.md
│       │   ├── package.json
│       │   ├── src
│       │   │   └── index.ts
│       │   └── tsconfig.json
│       ├── python-sdk-sample.py
│       └── ruby-sdk-sample.rb
└── univariate-live-demo
    ├── README.md
    ├── demo.py
    ├── image.webp
    ├── requirements.txt
    ├── sensor_data.csv
    └── utils.py
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/.DS_Store
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
4 | > Please provide us with the following information:
5 | > ---------------------------------------------------------------
6 |
7 | ### This issue is for a: (mark with an `x`)
8 | ```
9 | - [ ] bug report -> please search issues before submitting
10 | - [ ] feature request
11 | - [ ] documentation issue or request
12 | - [ ] regression (a behavior that used to work and stopped in a new release)
13 | ```
14 |
15 | ### Minimal steps to reproduce
16 | >
17 |
18 | ### Any log messages given by the failure
19 | >
20 |
21 | ### Expected/desired behavior
22 | >
23 |
24 | ### OS and Version?
25 | > Windows 7, 8 or 10. Linux (which distribution). macOS (Yosemite? El Capitan? Sierra?)
26 |
27 | ### Versions
28 | >
29 |
30 | ### Mention any other details that might be useful
31 |
32 | > ---------------------------------------------------------------
33 | > Thanks! We'll be in touch soon.
34 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Purpose
2 |
3 | * ...
4 |
5 | ## Does this introduce a breaking change?
6 |
7 | ```
8 | [ ] Yes
9 | [ ] No
10 | ```
11 |
12 | ## Pull Request Type
13 | What kind of change does this Pull Request introduce?
14 |
15 |
16 | ```
17 | [ ] Bugfix
18 | [ ] Feature
19 | [ ] Code style update (formatting, local variables)
20 | [ ] Refactoring (no functional changes, no API changes)
21 | [ ] Documentation content changes
22 | [ ] Other... Please describe:
23 | ```
24 |
25 | ## How to Test
26 | * Get the code
27 |
28 | ```
29 | git clone [repo-address]
30 | cd [repo-name]
31 | git checkout [branch-name]
32 | npm install
33 | ```
34 |
35 | * Test the code
36 |
37 | ```
38 | ```
39 |
40 | ## What to Check
41 | Verify that the following are valid
42 | * ...
43 |
44 | ## Other Information
45 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.suo
8 | *.user
9 | *.userosscache
10 | *.sln.docstates
11 |
12 | # User-specific files (MonoDevelop/Xamarin Studio)
13 | *.userprefs
14 |
15 | # Build results
16 | [Dd]ebug/
17 | [Dd]ebugPublic/
18 | [Rr]elease/
19 | [Rr]eleases/
20 | x64/
21 | x86/
22 | bld/
23 | [Bb]in/
24 | [Oo]bj/
25 | [Ll]og/
26 |
27 | # Visual Studio 2015/2017 cache/options directory
28 | .vs/
29 | # Uncomment if you have tasks that create the project's static files in wwwroot
30 | #wwwroot/
31 |
32 | # Visual Studio 2017 auto generated files
33 | Generated\ Files/
34 |
35 | # MSTest test Results
36 | [Tt]est[Rr]esult*/
37 | [Bb]uild[Ll]og.*
38 |
39 | # NUNIT
40 | *.VisualState.xml
41 | TestResult.xml
42 |
43 | # Build Results of an ATL Project
44 | [Dd]ebugPS/
45 | [Rr]eleasePS/
46 | dlldata.c
47 |
48 | # Benchmark Results
49 | BenchmarkDotNet.Artifacts/
50 |
51 | # .NET Core
52 | project.lock.json
53 | project.fragment.lock.json
54 | artifacts/
55 | **/Properties/launchSettings.json
56 |
57 | # StyleCop
58 | StyleCopReport.xml
59 |
60 | # Files built by Visual Studio
61 | *_i.c
62 | *_p.c
63 | *_i.h
64 | *.ilk
65 | *.meta
66 | *.obj
67 | *.iobj
68 | *.pch
69 | *.pdb
70 | *.ipdb
71 | *.pgc
72 | *.pgd
73 | *.rsp
74 | *.sbr
75 | *.tlb
76 | *.tli
77 | *.tlh
78 | *.tmp
79 | *.tmp_proj
80 | *.log
81 | *.vspscc
82 | *.vssscc
83 | .builds
84 | *.pidb
85 | *.svclog
86 | *.scc
87 |
88 | # Chutzpah Test files
89 | _Chutzpah*
90 |
91 | # Visual C++ cache files
92 | ipch/
93 | *.aps
94 | *.ncb
95 | *.opendb
96 | *.opensdf
97 | *.sdf
98 | *.cachefile
99 | *.VC.db
100 | *.VC.VC.opendb
101 |
102 | # Visual Studio profiler
103 | *.psess
104 | *.vsp
105 | *.vspx
106 | *.sap
107 |
108 | # Visual Studio Trace Files
109 | *.e2e
110 |
111 | # TFS 2012 Local Workspace
112 | $tf/
113 |
114 | # Guidance Automation Toolkit
115 | *.gpState
116 |
117 | # ReSharper is a .NET coding add-in
118 | _ReSharper*/
119 | *.[Rr]e[Ss]harper
120 | *.DotSettings.user
121 |
122 | # JustCode is a .NET coding add-in
123 | .JustCode
124 |
125 | # TeamCity is a build add-in
126 | _TeamCity*
127 |
128 | # DotCover is a Code Coverage Tool
129 | *.dotCover
130 |
131 | # AxoCover is a Code Coverage Tool
132 | .axoCover/*
133 | !.axoCover/settings.json
134 |
135 | # Visual Studio code coverage results
136 | *.coverage
137 | *.coveragexml
138 |
139 | # NCrunch
140 | _NCrunch_*
141 | .*crunch*.local.xml
142 | nCrunchTemp_*
143 |
144 | # MightyMoose
145 | *.mm.*
146 | AutoTest.Net/
147 |
148 | # Web workbench (sass)
149 | .sass-cache/
150 |
151 | # Installshield output folder
152 | [Ee]xpress/
153 |
154 | # DocProject is a documentation generator add-in
155 | DocProject/buildhelp/
156 | DocProject/Help/*.HxT
157 | DocProject/Help/*.HxC
158 | DocProject/Help/*.hhc
159 | DocProject/Help/*.hhk
160 | DocProject/Help/*.hhp
161 | DocProject/Help/Html2
162 | DocProject/Help/html
163 |
164 | # Click-Once directory
165 | publish/
166 |
167 | # Publish Web Output
168 | *.[Pp]ublish.xml
169 | *.azurePubxml
170 | # Note: Comment the next line if you want to checkin your web deploy settings,
171 | # but database connection strings (with potential passwords) will be unencrypted
172 | *.pubxml
173 | *.publishproj
174 |
175 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
176 | # checkin your Azure Web App publish settings, but sensitive information contained
177 | # in these scripts will be unencrypted
178 | PublishScripts/
179 |
180 | # NuGet Packages
181 | *.nupkg
182 | # The packages folder can be ignored because of Package Restore
183 | **/[Pp]ackages/*
184 | # except build/, which is used as an MSBuild target.
185 | !**/[Pp]ackages/build/
186 | # Uncomment if necessary however generally it will be regenerated when needed
187 | #!**/[Pp]ackages/repositories.config
188 | # NuGet v3's project.json files produces more ignorable files
189 | *.nuget.props
190 | *.nuget.targets
191 |
192 | # Microsoft Azure Build Output
193 | csx/
194 | *.build.csdef
195 |
196 | # Microsoft Azure Emulator
197 | ecf/
198 | rcf/
199 |
200 | # Windows Store app package directories and files
201 | AppPackages/
202 | BundleArtifacts/
203 | Package.StoreAssociation.xml
204 | _pkginfo.txt
205 | *.appx
206 |
207 | # Visual Studio cache files
208 | # files ending in .cache can be ignored
209 | *.[Cc]ache
210 | # but keep track of directories ending in .cache
211 | !*.[Cc]ache/
212 |
213 | # Others
214 | ClientBin/
215 | ~$*
216 | *~
217 | *.dbmdl
218 | *.dbproj.schemaview
219 | *.jfm
220 | *.pfx
221 | *.publishsettings
222 | orleans.codegen.cs
223 |
224 | # Including strong name files can present a security risk
225 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
226 | #*.snk
227 |
228 | # Since there are multiple workflows, uncomment next line to ignore bower_components
229 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
230 | #bower_components/
231 |
232 | # RIA/Silverlight projects
233 | Generated_Code/
234 |
235 | # Backup & report files from converting an old project file
236 | # to a newer Visual Studio version. Backup files are not needed,
237 | # because we have git ;-)
238 | _UpgradeReport_Files/
239 | Backup*/
240 | UpgradeLog*.XML
241 | UpgradeLog*.htm
242 | ServiceFabricBackup/
243 | *.rptproj.bak
244 |
245 | # SQL Server files
246 | *.mdf
247 | *.ldf
248 | *.ndf
249 |
250 | # Business Intelligence projects
251 | *.rdl.data
252 | *.bim.layout
253 | *.bim_*.settings
254 | *.rptproj.rsuser
255 |
256 | # Microsoft Fakes
257 | FakesAssemblies/
258 |
259 | # GhostDoc plugin setting file
260 | *.GhostDoc.xml
261 |
262 | # Node.js Tools for Visual Studio
263 | .ntvs_analysis.dat
264 | node_modules/
265 |
266 | # Visual Studio 6 build log
267 | *.plg
268 |
269 | # Visual Studio 6 workspace options file
270 | *.opt
271 |
272 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
273 | *.vbw
274 |
275 | # Visual Studio LightSwitch build output
276 | **/*.HTMLClient/GeneratedArtifacts
277 | **/*.DesktopClient/GeneratedArtifacts
278 | **/*.DesktopClient/ModelManifest.xml
279 | **/*.Server/GeneratedArtifacts
280 | **/*.Server/ModelManifest.xml
281 | _Pvt_Extensions
282 |
283 | # Paket dependency manager
284 | .paket/paket.exe
285 | paket-files/
286 |
287 | # FAKE - F# Make
288 | .fake/
289 |
290 | # JetBrains Rider
291 | .idea/
292 | *.sln.iml
293 |
294 | # CodeRush
295 | .cr/
296 |
297 | # Python Tools for Visual Studio (PTVS)
298 | __pycache__/
299 | *.pyc
300 |
301 | # Cake - Uncomment if you are using it
302 | # tools/**
303 | # !tools/packages.config
304 |
305 | # Tabs Studio
306 | *.tss
307 |
308 | # Telerik's JustMock configuration file
309 | *.jmconfig
310 |
311 | # BizTalk build output
312 | *.btp.cs
313 | *.btm.cs
314 | *.odx.cs
315 | *.xsd.cs
316 |
317 | # OpenCover UI analysis results
318 | OpenCover/
319 |
320 | # Azure Stream Analytics local run output
321 | ASALocalRun/
322 |
323 | # MSBuild Binary and Structured Log
324 | *.binlog
325 |
326 | # NVidia Nsight GPU debugger configuration file
327 | *.nvuser
328 |
329 | # MFractors (Xamarin productivity tool) working folder
330 | .mfractor/
331 |
332 | .vscode/
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to [project-title]
2 |
3 | This project welcomes contributions and suggestions. Most contributions require you to agree to a
4 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
5 | the rights to use your contribution. For details, visit https://cla.microsoft.com.
6 |
7 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide
8 | a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions
9 | provided by the bot. You will only need to do this once across all repos using our CLA.
10 |
11 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
12 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
13 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
14 |
15 | - [Code of Conduct](#coc)
16 | - [Issues and Bugs](#issue)
17 | - [Feature Requests](#feature)
18 | - [Submission Guidelines](#submit)
19 |
20 | ## Code of Conduct
21 | Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
22 |
23 | ## Found an Issue?
24 | If you find a bug in the source code or a mistake in the documentation, you can help us by
25 | [submitting an issue](#submit-issue) to the GitHub Repository. Even better, you can
26 | [submit a Pull Request](#submit-pr) with a fix.
27 |
28 | ## Want a Feature?
29 | You can *request* a new feature by [submitting an issue](#submit-issue) to the GitHub
30 | Repository. If you would like to *implement* a new feature, please submit an issue with
31 | a proposal for your work first, to be sure that we can use it.
32 |
33 | * **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
34 |
35 | ## Submission Guidelines
36 |
37 | ### Submitting an Issue
38 | Before you submit an issue, search the archive; your question may already have been answered.
39 |
40 | If your issue appears to be a bug, and hasn't been reported, open a new issue.
41 | Help us to maximize the effort we can spend fixing issues and adding new
42 | features, by not reporting duplicate issues. Providing the following information will increase the
43 | chances of your issue being dealt with quickly:
44 |
45 | * **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps
46 | * **Version** - what version is affected (e.g. 0.1.2)
47 | * **Motivation for or Use Case** - explain what you are trying to do and why the current behavior is a bug for you
48 | * **Browsers and Operating System** - is this a problem with all browsers?
49 | * **Reproduce the Error** - provide a live example or an unambiguous set of steps
50 | * **Related Issues** - has a similar issue been reported before?
51 | * **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be
52 | causing the problem (line of code or commit)
53 |
54 | You can file new issues by providing the above information at the corresponding repository's issues link: https://github.com/[organization-name]/[repository-name]/issues/new.
55 |
56 | ### Submitting a Pull Request (PR)
57 | Before you submit your Pull Request (PR) consider the following guidelines:
58 |
59 | * Search the repository (https://github.com/[organization-name]/[repository-name]/pulls) for an open or closed PR
60 | that relates to your submission. You don't want to duplicate effort.
61 |
62 | * Make your changes in a new git fork:
63 |
64 | * Commit your changes using a descriptive commit message
65 | * Push your fork to GitHub:
66 | * In GitHub, create a pull request
67 | * If we suggest changes then:
68 | * Make the required updates.
69 | * Rebase your fork and force push to your GitHub repository (this will update your Pull Request):
70 |
71 | ```shell
72 | git rebase master -i
73 | git push -f
74 | ```
75 |
76 | That's it! Thank you for your contribution!
77 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation. All rights reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Anomaly Detector Samples
2 |
3 | This repository contains API samples and SDK samples for the [Anomaly Detector service](https://aka.ms/anomalydetector). Anomaly Detector enables you to monitor and find abnormalities in your time series data by automatically identifying and applying the correct statistical models, regardless of industry, scenario, or data volume.
4 |
5 | ## What's new?
6 |
7 | ### March 2024: Anomaly Detector Now Available on PyPI 🎉
8 |
9 | In March 2024, we proudly announced the release of the Anomaly Detector package on PyPI!
10 |
11 | While the existing Anomaly Detector service will be deprecated by 2026, you can now seamlessly use the new package directly on your local machine. There is no need to create an Azure Anomaly Detector resource; simply install the package and start detecting anomalies right away.
12 |
13 | For the latest details and usage instructions, refer to our Python notebook available here: [anomaly-detector-pypi-demo.ipynb](anomaly-detector-pypi-demo.ipynb)
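
A minimal usage sketch distilled from that notebook (the random data below is purely illustrative; `sliding_window` and `device` are the two required entries in `params`):

```python
# pip install time-series-anomaly-detector
import numpy as np
import pandas as pd
from anomaly_detector import MultivariateAnomalyDetector

# Illustrative random training data: 1000 points, 5 variables
columns = [f"variable_{i}" for i in range(5)]
training_data = pd.DataFrame(np.random.randn(1000, 5), columns=columns)

model = MultivariateAnomalyDetector()
model.fit(training_data, params={"sliding_window": 200, "device": "cpu"})

# Inference on a window slightly longer than sliding_window, as in the notebook
eval_data = pd.DataFrame(np.random.randn(201, 5), columns=columns)
results = model.predict(data=eval_data, context=None)
```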
14 |
15 | ## 👋About Anomaly Detector
16 | [Anomaly Detector](https://learn.microsoft.com/en-us/azure/cognitive-services/anomaly-detector/overview) is an AI service with a set of APIs that enables you to monitor and detect anomalies in your time series data with little machine learning knowledge, through either batch validation or real-time inference.
17 |
18 | [Univariate Anomaly Detection API](https://learn.microsoft.com/en-us/azure/cognitive-services/anomaly-detector/how-to/identify-anomalies) enables you to monitor and detect abnormalities in your single variable without having to know machine learning. The Anomaly Detector API's algorithms adapt by automatically identifying and applying the best-fitting models to your data, regardless of industry, scenario, or data volume. Using your time series data, the API determines boundaries for anomaly detection, expected values, and which data points are anomalies.
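
For illustration, a batch detection request can be sent with plain `requests`, mirroring the `detect` helper used in the notebooks in this repository (the key and endpoint placeholders are yours to fill in; the sample file path is relative to the repository root):

```python
import json
import requests

apikey = "[Placeholder: your Anomaly Detector key]"
endpoint = "[Placeholder: your resource endpoint]/anomalydetector/v1.0/timeseries/entire/detect"

# A sample request body shipped with this repository
request_data = json.load(open("sampledata/univariate/univariate_sample_hourly.json"))
request_data["granularity"] = "hourly"
request_data["sensitivity"] = 95

headers = {"Content-Type": "application/json", "Ocp-Apim-Subscription-Key": apikey}
response = requests.post(endpoint, data=json.dumps(request_data), headers=headers)
response.raise_for_status()
print(response.json()["isAnomaly"])  # one flag per input point
```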
19 |
20 | [Multivariate anomaly detection API](https://learn.microsoft.com/en-us/azure/cognitive-services/anomaly-detector/how-to/create-resource) further enables developers to easily integrate advanced AI for detecting anomalies from groups of metrics, without the need for machine learning knowledge or labeled data. Dependencies and inter-correlations between up to 300 different signals are automatically accounted for as key factors. This capability helps you proactively protect complex systems such as software applications, servers, factory machines, spacecraft, or even your business, from failures.
21 |
22 | ## Prerequisites
23 |
24 | You must have an [Anomaly Detector API resource](https://aka.ms/adnew). Before continuing, you will need the API **key** and the **endpoint** from your Azure dashboard.
25 | 
26 |
27 | Alternatively, you can create a free 7-day Anomaly Detector resource [here](https://azure.microsoft.com/en-us/try/cognitive-services/my-apis/).
28 |
29 | ## Content
30 |
31 | This repository is organized in the following structure. If you are a fan of Python, we recommend starting with the simple samples in the `ipython-notebook` folder. 🤗
32 |
33 | | Folder | Description |
34 | |-------------|-------------|
35 | | 🆕[ipython-notebook](/ipython-notebook/) | [API](/ipython-notebook/API%20Sample/) and [SDK](/ipython-notebook/SDK%20Sample/) sample code written as Python notebooks for UVAD and MVAD. The latest updates will land here first. 😉 |
36 | | [sampledata](/sampledata/) | All the sample datasets used in this repository. |
37 | | [samples-multivariate](/samples-multivariate/) | Sample SDK code for MVAD (preview version) in 4 languages. |
38 | | [samples-univariate](/samples-univariate/) | Sample API and SDK code for UVAD in 4 languages. |
39 | | [univariate-live-demo](/univariate-live-demo/)| A live demo that you can clone directly, run on your own data, or modify as needed. |
40 | | [postman-demo](/postman-demo/) | A tutorial on using Postman to call the Anomaly Detector APIs, which can help you better understand the service from an API perspective. |
41 |
42 | ## 🔗Important links
43 |
44 | ### 1️⃣Microsoft Learn - Anomaly Detector
45 |
46 | - Learning module: [Identify abnormal time-series data with Anomaly Detector](https://learn.microsoft.com/en-us/training/modules/identify-abnormal-time-series-data-anomaly-detector/?WT.mc_id=data-12171-ruyakubu)
47 |
48 | ### 2️⃣API/SDK Sample
49 |
50 | - [Anomaly Detector Sample](https://github.com/Azure-Samples/AnomalyDetector)
51 | - [Anomaly Detector Sample in python notebook](https://github.com/Azure-Samples/AnomalyDetector/tree/master/ipython-notebook)
52 |
53 | ### 3️⃣Anomaly Detector in Synapse
54 |
55 | - [Tutorial: Use Multivariate Anomaly Detector in Azure Synapse Analytics](https://learn.microsoft.com/en-us/azure/cognitive-services/anomaly-detector/tutorials/multivariate-anomaly-detection-synapse)
56 | - [Sample notebook for MVAD in Synapse](https://github.com/jr-MS/MVAD-in-Synapse)
57 |
58 | ### 4️⃣Anomaly Detector in Azure Databricks
59 |
60 | - [Blog: Detect Anomalies in Equipment with Anomaly Detector in Azure Databricks](https://techcommunity.microsoft.com/t5/ai-cognitive-services-blog/detect-anomalies-in-equipment-with-anomaly-detector-in-azure/ba-p/3390688)
61 |
62 | ### 5️⃣Anomaly Detector in Azure Data Explorer
63 |
64 | - [Blog: Announcing Univariate Anomaly Detector in Azure Data Explorer](https://techcommunity.microsoft.com/t5/ai-applied-ai-blog/announcing-univariate-anomaly-detector-in-azure-data-explorer/ba-p/3285400)
65 | - [Documentation about anomaly detection function in ADX](https://learn.microsoft.com/en-us/azure/data-explorer/kusto/functions-library/series-uv-anomalies-fl?tabs=adhoc)
66 |
67 | ### 6️⃣Anomaly Detector PowerBI
68 |
69 | - [Anomaly Detection in PowerBI - UI](https://learn.microsoft.com/en-us/power-bi/visuals/power-bi-visualization-anomaly-detection)
70 | - [Anomaly Detection in PowerBI - PowerQuery](https://learn.microsoft.com/en-us/azure/cognitive-services/anomaly-detector/tutorials/batch-anomaly-detection-powerbi)
71 |
72 | ## Container demo
73 |
74 | (Supports UVAD only)
75 |
76 | If you want to run the notebook with an on-premises UVAD version of [Anomaly Detector as a container](https://aka.ms/adcontainerdocs), there are four prerequisites that must be met:
77 |
78 | 1. You have access to the Azure Container Registry which hosts the Anomaly Detector container images. Please complete and submit the [Anomaly Detector Container Request form](https://aka.ms/adcontainer) to request access to the container.
79 | 1. You have created an Anomaly Detector resource on Azure.
80 | 1. You have the proper container environment ready to host the Anomaly Detector container. Please read [Prerequisites](https://docs.microsoft.com/en-us/azure/cognitive-services/anomaly-detector/anomaly-detector-container-howto#prerequisites) and [The host computer](https://docs.microsoft.com/en-us/azure/cognitive-services/anomaly-detector/anomaly-detector-container-howto#the-host-computer) for details.
81 | 1. You have [Jupyter Notebook](https://jupyter.org/install.html) installed on your computer. We recommend installing Python and Jupyter using the [Anaconda Distribution](https://www.anaconda.com/downloads).
82 |
83 | After you pull the container image and spin it up, ensure there is an HTTP endpoint through which the APIs are accessible; this will be your **endpoint** for the demo.
84 | To run the notebook with your Anomaly Detector container instance, complete the following steps:
85 |
86 | 1. Clone this project to your local directory
87 | 1. Start **Anaconda Prompt**
88 | 1. In the command line, change the working directory to your project directory using **cd**
89 | 1. Type **jupyter notebook** and run it, which opens http://localhost:8888/tree in a browser window
90 | 1. Open one of the notebooks under **ipython-notebook** folder
91 | 1. Fill in the API key (from your Anomaly Detector resource on Azure) and the endpoint (from your Anomaly Detector container instance)
92 | 1. In the notebook main menu, click **Cell** -> **Run All**
93 |
94 |
95 | ## ❤️Support
96 | Need support? [Join the Anomaly Detector Community](https://forms.office.com/pages/responsepage.aspx?id=v4j5cvGGr0GRqy180BHbR2Ci-wb6-iNDoBoNxrnEk9VURjNXUU1VREpOT0U1UEdURkc0OVRLSkZBNC4u).
97 |
--------------------------------------------------------------------------------
/anomaly-detector-pypi-demo.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "a2f4ea7a79dcc5e",
6 | "metadata": {
7 | "collapsed": false
8 | },
9 | "source": [
10 | "## Install package"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": null,
16 | "id": "a0f996e3f768461f",
17 | "metadata": {
18 | "collapsed": false,
19 | "vscode": {
20 | "languageId": "powershell"
21 | }
22 | },
23 | "outputs": [],
24 | "source": [
25 | "pip install time-series-anomaly-detector"
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "id": "8346a504",
31 | "metadata": {},
32 | "source": [
33 | "## Multivariate Anomaly Detection"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "id": "4f25e1b2",
39 | "metadata": {},
40 | "source": [
41 | "### 1. Train model\n",
42 |         "To train an MVAD model, the type of the training data should be `DataFrame`, and you must specify `sliding_window` and `device` in `params`. \n",
43 | "\n",
44 |         "Moreover, please note that in MVAD, the `timestamp` of the training data is optional."
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": null,
50 | "id": "d5da3531",
51 | "metadata": {},
52 | "outputs": [],
53 | "source": [
54 | "import numpy as np\n",
55 | "import pandas as pd\n",
56 | "from anomaly_detector import MultivariateAnomalyDetector\n",
57 | "\n",
58 | "import json\n",
59 | "from pprint import pprint\n",
60 | "\n",
61 | "data_size = 1000\n",
62 | "var_num = 20\n",
63 | "\n",
64 | "training_data = np.random.randn(data_size, var_num)\n",
65 | "columns = [f\"variable_{i}\" for i in range(var_num)]\n",
66 | "training_data = pd.DataFrame(training_data, columns=columns)\n",
67 | "\n",
68 | "# Optional\n",
69 | "timestamps = pd.date_range(start=\"2023-01-03\", periods=data_size, freq=\"H\")\n",
70 | "training_data[\"timestamp\"] = timestamps.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n",
71 | "training_data = training_data.set_index(\"timestamp\", drop=True)\n",
72 | "\n",
73 | "params = {\"sliding_window\": 200, \"device\": \"cpu\"}\n",
74 | "\n",
75 | "model = MultivariateAnomalyDetector()\n",
76 | "\n",
77 | "# Train model\n",
78 | "model.fit(training_data, params=params)"
79 | ]
80 | },
81 | {
82 | "cell_type": "markdown",
83 | "id": "b918d943",
84 | "metadata": {},
85 | "source": [
86 | "### 2. Inference"
87 | ]
88 | },
89 | {
90 | "cell_type": "code",
91 | "execution_count": null,
92 | "id": "f3a010d7",
93 | "metadata": {},
94 | "outputs": [],
95 | "source": [
96 | "eval_data = np.random.randn(201, var_num)\n",
97 | "eval_data[-1, :] += 100\n",
98 | "eval_data = pd.DataFrame(eval_data, columns=columns)\n",
99 | "\n",
100 | "# Optional\n",
101 | "timestamps = pd.date_range(start=\"2023-01-03\", periods=201, freq=\"H\")\n",
102 | "eval_data[\"timestamp\"] = timestamps.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n",
103 | "eval_data = eval_data.set_index(\"timestamp\", drop=True)\n",
104 | "\n",
105 | "# prediction\n",
106 | "results = model.predict(data=eval_data, context=None)\n",
107 | "\n",
108 | "pprint(results)"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "id": "1cc129ba",
114 | "metadata": {},
115 | "source": [
116 | "## Univariate Anomaly Detection"
117 | ]
118 | },
119 | {
120 | "cell_type": "markdown",
121 | "id": "5d12be4c",
122 | "metadata": {},
123 | "source": [
124 | "### Inference\n",
125 | "\n",
126 |         "Please note that univariate anomaly detection does not require training before inference, and the `timestamp` of `eval_data` must be specified."
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": null,
132 | "id": "232963b5",
133 | "metadata": {},
134 | "outputs": [],
135 | "source": [
136 | "import numpy as np\n",
137 | "import pandas as pd\n",
138 | "from anomaly_detector import EntireAnomalyDetector\n",
139 | "\n",
140 | "params = {\n",
141 | " \"granularity\": \"monthly\", \n",
142 | " \"maxAnomalyRatio\": 0.25, \n",
143 | " \"sensitivity\": 95, \n",
144 | " \"imputeMode\": \"auto\"\n",
145 | "}\n",
146 | "\n",
147 | "\n",
148 | "model = EntireAnomalyDetector()\n",
149 | "\n",
150 | "eval_data = np.ones(20)\n",
151 | "eval_data[-1] = 0\n",
152 | "eval_data = pd.DataFrame(eval_data, columns=[\"value\"])\n",
153 | "\n",
154 | "timestamps = pd.date_range(start=\"1962-01-01\", periods=20, freq=\"ME\")\n",
155 | "eval_data[\"timestamp\"] = timestamps\n",
156 | "\n",
157 | "results = model.predict(\n",
158 | " data=eval_data,\n",
159 | " params=params,\n",
160 | " context=None\n",
161 | ")\n",
162 | "print(results)\n"
163 | ]
164 | }
165 | ],
166 | "metadata": {
167 | "kernelspec": {
168 | "display_name": "Python 3",
169 | "language": "python",
170 | "name": "python3"
171 | },
172 | "language_info": {
173 | "codemirror_mode": {
174 | "name": "ipython",
175 | "version": 3
176 | },
177 | "file_extension": ".py",
178 | "mimetype": "text/x-python",
179 | "name": "python",
180 | "nbconvert_exporter": "python",
181 | "pygments_lexer": "ipython3",
182 | "version": "3.10.13"
183 | }
184 | },
185 | "nbformat": 4,
186 | "nbformat_minor": 5
187 | }
188 |
--------------------------------------------------------------------------------
/ipython-notebook/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/ipython-notebook/.DS_Store
--------------------------------------------------------------------------------
/ipython-notebook/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
--------------------------------------------------------------------------------
/ipython-notebook/API Sample/Batch anomaly detection with the Anomaly Detector API.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Batch anomaly detection with the Anomaly Detector API"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "### Use this Jupyter notebook to start visualizing anomalies as a batch with the Anomaly Detector API in Python.\n",
15 | "\n",
16 |         "This notebook shows you how to send a batch anomaly detection request, and visualize the anomalies found throughout the example data set. The graph created at the end of this notebook will display the following:\n",
17 | "* Anomalies found throughout the data set, highlighted.\n",
18 | "* The expected values versus the values contained in the data set.\n",
19 | "* Anomaly detection boundaries \n"
20 | ]
21 | },
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {},
25 | "source": [
26 | "To start sending requests to the Anomaly Detector API, paste your Anomaly Detector resource access key below,\n",
27 |         "and replace the endpoint variable with the endpoint for your region or your on-premises container endpoint. \n",
28 | "\n",
29 | "Endpoint examples:\n",
30 | "\n",
31 | "`https://westus2.api.cognitive.microsoft.com/anomalydetector/v1.0/timeseries/entire/detect`\n",
32 | "\n",
33 | "`http://127.0.0.1:5000/anomalydetector/v1.0/timeseries/entire/detect`"
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": null,
39 | "metadata": {},
40 | "outputs": [],
41 | "source": [
42 | "apikey = '[Placeholder: Your Anomaly Detector resource access key]' \n",
43 | "endpoint = '[Placeholder: Your Anomaly Detector resource endpoint]/anomalydetector/v1.0/timeseries/entire/detect'"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": null,
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "import requests\n",
53 | "import json\n",
54 | "import pandas as pd\n",
55 | "import numpy as np\n",
56 | "import warnings\n",
57 | "warnings.filterwarnings('ignore')\n",
58 | "\n",
59 | "# Import library to display results\n",
60 | "import matplotlib.pyplot as plt\n",
61 | "%matplotlib inline "
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": null,
67 | "metadata": {},
68 | "outputs": [],
69 | "source": [
70 | "from bokeh.plotting import figure,output_notebook, show\n",
71 | "from bokeh.palettes import Blues4\n",
72 | "from bokeh.models import ColumnDataSource,Slider\n",
73 | "import datetime\n",
74 | "from bokeh.io import push_notebook\n",
75 | "from dateutil import parser\n",
76 | "from ipywidgets import interact, widgets, fixed\n",
77 | "output_notebook()"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": null,
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "def detect(endpoint, apikey, request_data):\n",
87 | " headers = {'Content-Type': 'application/json', 'Ocp-Apim-Subscription-Key': apikey}\n",
88 | " response = requests.post(endpoint, data=json.dumps(request_data), headers=headers)\n",
89 | " if response.status_code == 200:\n",
90 | " return json.loads(response.content.decode(\"utf-8\"))\n",
91 | " else:\n",
92 | " print(response.status_code)\n",
93 | " raise Exception(response.text)"
94 | ]
95 | },
96 | {
97 | "cell_type": "code",
98 | "execution_count": null,
99 | "metadata": {},
100 | "outputs": [],
101 | "source": [
102 | "def build_figure(sample_data, sensitivity):\n",
103 | " sample_data['sensitivity'] = sensitivity\n",
104 | " result = detect(endpoint, apikey, sample_data)\n",
105 | " columns = {'expectedValues': result['expectedValues'], 'isAnomaly': result['isAnomaly'], 'isNegativeAnomaly': result['isNegativeAnomaly'],\n",
106 | " 'isPositiveAnomaly': result['isPositiveAnomaly'], 'upperMargins': result['upperMargins'], 'lowerMargins': result['lowerMargins'],\n",
107 | " 'timestamp': [parser.parse(x['timestamp']) for x in sample_data['series']], \n",
108 | " 'value': [x['value'] for x in sample_data['series']]}\n",
109 | " response = pd.DataFrame(data=columns)\n",
110 | " values = response['value']\n",
111 | " label = response['timestamp']\n",
112 | " anomalies = []\n",
113 | " anomaly_labels = []\n",
114 | " index = 0\n",
115 | " anomaly_indexes = []\n",
116 |         "    p = figure(x_axis_type='datetime', title=\"Batch Anomaly Detection ({0} Sensitivity)\".format(sensitivity), width=800, height=600)\n",
117 | " for anom in response['isAnomaly']:\n",
118 | " if anom == True and (values[index] > response.iloc[index]['expectedValues'] + response.iloc[index]['upperMargins'] or \n",
119 | " values[index] < response.iloc[index]['expectedValues'] - response.iloc[index]['lowerMargins']):\n",
120 | " anomalies.append(values[index])\n",
121 | " anomaly_labels.append(label[index])\n",
122 | " anomaly_indexes.append(index)\n",
123 | " index = index+1\n",
124 | " upperband = response['expectedValues'] + response['upperMargins']\n",
125 | " lowerband = response['expectedValues'] -response['lowerMargins']\n",
126 | " band_x = np.append(label, label[::-1])\n",
127 | " band_y = np.append(lowerband, upperband[::-1])\n",
128 | " boundary = p.patch(band_x, band_y, color=Blues4[2], fill_alpha=0.5, line_width=1, legend_label='Boundary')\n",
129 | " p.line(label, values, legend_label='Value', color=\"#2222aa\", line_width=1)\n",
130 | " p.line(label, response['expectedValues'], legend_label='ExpectedValue', line_width=1, line_dash=\"dotdash\", line_color='olivedrab')\n",
131 | " anom_source = ColumnDataSource(dict(x=anomaly_labels, y=anomalies))\n",
132 | " anoms = p.circle('x', 'y', size=5, color='tomato', source=anom_source)\n",
133 | " p.legend.border_line_width = 1\n",
134 | " p.legend.background_fill_alpha = 0.1\n",
135 | " show(p, notebook_handle=True)"
136 | ]
137 | },
138 | {
139 | "cell_type": "markdown",
140 | "metadata": {},
141 | "source": [
142 |         "## Visualizing anomalies throughout your data\n",
143 | "\n",
144 | "The following cells call the Anomaly Detector API with two different example time series data sets, and different sensitivities for anomaly detection. Varying the sensitivity of the Anomaly Detector API can improve how well the response fits your data."
145 | ]
146 | },
147 | {
148 | "cell_type": "markdown",
149 | "metadata": {},
150 | "source": [
151 | "### Example 1: time series with an hourly sampling frequency\n"
152 | ]
153 | },
154 | {
155 | "cell_type": "code",
156 | "execution_count": null,
157 | "metadata": {},
158 | "outputs": [],
159 | "source": [
160 | "# Hourly Sample\n",
161 | "sample_data = json.load(open('../../sampledata/univariate/univariate_sample_hourly.json'))\n",
162 | "sample_data['granularity'] = 'hourly'\n",
163 | "sample_data['period'] = 24\n",
164 | "# 95 sensitivity\n",
165 | "build_figure(sample_data,95)"
166 | ]
167 | },
168 | {
169 | "cell_type": "code",
170 | "execution_count": null,
171 | "metadata": {},
172 | "outputs": [],
173 | "source": [
174 | "# 90 sensitivity\n",
175 | "build_figure(sample_data,90)"
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": null,
181 | "metadata": {},
182 | "outputs": [],
183 | "source": [
184 | "#85 sensitivity\n",
185 | "build_figure(sample_data,85)"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {},
191 | "source": [
192 |         "### Example 2: time series with a daily sampling frequency\n"
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": null,
198 | "metadata": {},
199 | "outputs": [],
200 | "source": [
201 | "#daily sample\n",
202 | "sample_data = json.load(open('../../sampledata/univariate/univariate_sample_daily.json'))\n",
203 | "sample_data['granularity'] = 'daily'\n",
204 | "# 95 sensitivity\n",
205 | "build_figure(sample_data,95)"
206 | ]
207 | },
208 | {
209 | "cell_type": "code",
210 | "execution_count": null,
211 | "metadata": {},
212 | "outputs": [],
213 | "source": [
214 | "# 90 sensitivity\n",
215 | "build_figure(sample_data,90)"
216 | ]
217 | },
218 | {
219 | "cell_type": "code",
220 | "execution_count": null,
221 | "metadata": {},
222 | "outputs": [],
223 | "source": [
224 |         "# 80 sensitivity\n",
225 | "build_figure(sample_data,80)"
226 | ]
227 | },
228 | {
229 | "cell_type": "code",
230 | "execution_count": null,
231 | "metadata": {},
232 | "outputs": [],
233 | "source": []
234 | }
235 | ],
236 | "metadata": {
237 | "kernelspec": {
238 | "display_name": "Python 3 (ipykernel)",
239 | "language": "python",
240 | "name": "python3"
241 | },
242 | "language_info": {
243 | "codemirror_mode": {
244 | "name": "ipython",
245 | "version": 3
246 | },
247 | "file_extension": ".py",
248 | "mimetype": "text/x-python",
249 | "name": "python",
250 | "nbconvert_exporter": "python",
251 | "pygments_lexer": "ipython3",
252 | "version": "3.10.9"
253 | },
254 | "vscode": {
255 | "interpreter": {
256 | "hash": "530dd268f570c5cdc0ceeb4c9b04e15d56fa20bfd35f8d335952cdbe40d1c280"
257 | }
258 | }
259 | },
260 | "nbformat": 4,
261 | "nbformat_minor": 4
262 | }
263 |
--------------------------------------------------------------------------------
/ipython-notebook/API Sample/Latest point detection with the Anomaly Detector API.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Latest point anomaly detection with the Anomaly Detector API"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "### Use this Jupyter notebook to start visualizing anomalies as a batch with the Anomaly Detector API in Python.\n",
15 | "\n",
16 | "While you can detect anomalies as a batch, you can also detect the anomaly status of the last data point in the time series. This notebook iteratively sends latest-point anomaly detection requests to the Anomaly Detector API and visualizes the response. The graph created at the end of this notebook will display the following:\n",
17 |         "* Anomalies found in the data set, highlighted.\n",
18 |         "* Anomaly detection boundaries \n",
19 |         "* The expected values versus the values contained in the data set.\n",
20 | "\n",
21 | "By calling the API on your data's latest points, you can monitor your data as it's created. \n",
22 | "\n",
23 |         "The following example simulates using the Anomaly Detector API on streaming data. Sections of the example time series are sent to the API over multiple iterations, and the anomaly status of each section's last data point is saved. The data set used in this example has a pattern that repeats roughly every 7 data points (the `period` in the request's JSON file), so for best results, the data set is sent in groups of 29 points (`4 * period + 1`: four full periods plus an extra data point. See [Best practices for using the Anomaly Detector API](https://docs.microsoft.com/azure/cognitive-services/anomaly-detector/concepts/anomaly-detection-best-practices) for more information). "
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "To start sending requests to the Anomaly Detector API, paste your Anomaly Detector resource access key below,\n",
31 |         "and replace the endpoint variable with the endpoint for your region or your on-premises container endpoint. \n",
32 | "\n",
33 | "Endpoint examples:\n",
34 | "\n",
35 | "`https://westus2.api.cognitive.microsoft.com/anomalydetector/v1.0/timeseries/last/detect`\n",
36 | "\n",
37 | "`http://127.0.0.1:5000/anomalydetector/v1.0/timeseries/last/detect`"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "apikey = '[Placeholder: Your Anomaly Detector resource access key]' \n",
47 | "endpoint_latest = '[Placeholder: Your Anomaly Detector resource endpoint]/anomalydetector/v1.0/timeseries/last/detect'"
48 | ]
49 | },
50 | {
51 | "cell_type": "code",
52 | "execution_count": null,
53 | "metadata": {},
54 | "outputs": [],
55 | "source": [
56 | "import requests\n",
57 | "import json\n",
58 | "import pandas as pd\n",
59 | "import numpy as np\n",
60 | "import warnings\n",
61 | "warnings.filterwarnings('ignore')\n",
62 | "\n",
63 | "# Import library to display results\n",
64 | "import matplotlib.pyplot as plt\n",
65 | "%matplotlib inline "
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "from bokeh.plotting import figure,output_notebook, show\n",
75 | "from bokeh.palettes import Blues4\n",
76 | "from bokeh.models import ColumnDataSource,Slider\n",
77 | "import datetime\n",
78 | "from bokeh.io import push_notebook\n",
79 | "from dateutil import parser\n",
80 | "from ipywidgets import interact, widgets, fixed\n",
81 | "output_notebook()"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "def detect(endpoint, apikey, request_data):\n",
91 | " headers = {'Content-Type': 'application/json', 'Ocp-Apim-Subscription-Key': apikey}\n",
92 | " response = requests.post(endpoint, data=json.dumps(request_data), headers=headers)\n",
93 | " if response.status_code == 200:\n",
94 | " return json.loads(response.content.decode(\"utf-8\"))\n",
95 | " else:\n",
96 | " print(response.status_code)\n",
97 | " raise Exception(response.text)"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": null,
103 | "metadata": {},
104 | "outputs": [],
105 | "source": [
106 | "def build_figure(result, sample_data, sensitivity):\n",
107 | " columns = {'expectedValues': result['expectedValues'], 'isAnomaly': result['isAnomaly'], 'isNegativeAnomaly': result['isNegativeAnomaly'],\n",
108 | " 'isPositiveAnomaly': result['isPositiveAnomaly'], 'upperMargins': result['upperMargins'], 'lowerMargins': result['lowerMargins']\n",
109 | " , 'value': [x['value'] for x in sample_data['series']], 'timestamp': [parser.parse(x['timestamp']) for x in sample_data['series']]}\n",
110 | " response = pd.DataFrame(data=columns)\n",
111 | " values = response['value']\n",
112 | " label = response['timestamp']\n",
113 | " anomalies = []\n",
114 | " anomaly_labels = []\n",
115 | " index = 0\n",
116 | " anomaly_indexes = []\n",
117 | " p = figure(x_axis_type='datetime', title=\"Anomaly Detection Result ({0} Sensitivity)\".format(sensitivity), width=800, height=600)\n",
118 | " for anom in response['isAnomaly']:\n",
119 | " if anom == True and (values[index] > response.iloc[index]['expectedValues'] + response.iloc[index]['upperMargins'] or \n",
120 | " values[index] < response.iloc[index]['expectedValues'] - response.iloc[index]['lowerMargins']):\n",
121 | " anomalies.append(values[index])\n",
122 | " anomaly_labels.append(label[index])\n",
123 | " anomaly_indexes.append(index)\n",
124 | " index = index+1\n",
125 | " upperband = response['expectedValues'] + response['upperMargins']\n",
126 | " lowerband = response['expectedValues'] -response['lowerMargins']\n",
127 | " band_x = np.append(label, label[::-1])\n",
128 | " band_y = np.append(lowerband, upperband[::-1])\n",
129 | " boundary = p.patch(band_x, band_y, color=Blues4[2], fill_alpha=0.5, line_width=1, legend_label='Boundary')\n",
130 | " p.line(label, values, legend_label='value', color=\"#2222aa\", line_width=1)\n",
131 | " p.line(label, response['expectedValues'], legend_label='expectedValue', line_width=1, line_dash=\"dotdash\", line_color='olivedrab')\n",
132 | " anom_source = ColumnDataSource(dict(x=anomaly_labels, y=anomalies))\n",
133 | " anoms = p.circle('x', 'y', size=5, color='tomato', source=anom_source)\n",
134 | " p.legend.border_line_width = 1\n",
135 | " p.legend.background_fill_alpha = 0.1\n",
136 | " show(p, notebook_handle=True)"
137 | ]
138 | },
139 | {
140 | "cell_type": "markdown",
141 | "metadata": {},
142 | "source": [
143 | "### Detect latest anomaly of sample timeseries"
144 | ]
145 | },
146 | {
147 | "cell_type": "markdown",
148 | "metadata": {},
149 | "source": [
150 | "The following cells call the Anomaly Detector API with an example time series data set and different sensitivities for anomaly detection. Varying the sensitivity of the Anomaly Detector API can improve how well the response fits your data. "
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": null,
156 | "metadata": {},
157 | "outputs": [],
158 | "source": [
159 | "def detect_anomaly(sensitivity):\n",
160 |         "    sample_data = json.load(open('../../sampledata/univariate/univariate_sample_daily.json'))\n",
161 | " points = sample_data['series']\n",
162 | " skip_point = 29\n",
163 | " result = {'expectedValues': [None]*len(points), 'upperMargins': [None]*len(points), \n",
164 | " 'lowerMargins': [None]*len(points), 'isNegativeAnomaly': [False]*len(points), \n",
165 | " 'isPositiveAnomaly':[False]*len(points), 'isAnomaly': [False]*len(points)}\n",
166 | " anom_count = 0\n",
167 | " for i in range(skip_point, len(points)+1):\n",
168 | " single_sample_data = {}\n",
169 | " single_sample_data['series'] = points[i-29:i]\n",
170 | " single_sample_data['granularity'] = 'daily'\n",
171 | " single_sample_data['maxAnomalyRatio'] = 0.25\n",
172 | " single_sample_data['sensitivity'] = sensitivity\n",
173 | " single_point = detect(endpoint_latest, apikey, single_sample_data)\n",
174 | " if single_point['isAnomaly'] == True:\n",
175 | " anom_count = anom_count + 1\n",
176 | "\n",
177 | " result['expectedValues'][i-1] = single_point['expectedValue']\n",
178 | " result['upperMargins'][i-1] = single_point['upperMargin']\n",
179 | " result['lowerMargins'][i-1] = single_point['lowerMargin']\n",
180 | " result['isNegativeAnomaly'][i-1] = single_point['isNegativeAnomaly']\n",
181 | " result['isPositiveAnomaly'][i-1] = single_point['isPositiveAnomaly']\n",
182 | " result['isAnomaly'][i-1] = single_point['isAnomaly']\n",
183 | " \n",
184 | " build_figure(result, sample_data, sensitivity)"
185 | ]
186 | },
187 | {
188 | "cell_type": "code",
189 | "execution_count": null,
190 | "metadata": {},
191 | "outputs": [],
192 | "source": [
193 |         "# 95 sensitivity\n",
194 | "detect_anomaly(95)"
195 | ]
196 | },
197 | {
198 | "cell_type": "code",
199 | "execution_count": null,
200 | "metadata": {},
201 | "outputs": [],
202 | "source": [
203 |         "# 85 sensitivity\n",
204 | "detect_anomaly(85)"
205 | ]
206 | }
207 | ],
208 | "metadata": {
209 | "kernelspec": {
210 | "display_name": "Python 3 (ipykernel)",
211 | "language": "python",
212 | "name": "python3"
213 | },
214 | "language_info": {
215 | "codemirror_mode": {
216 | "name": "ipython",
217 | "version": 3
218 | },
219 | "file_extension": ".py",
220 | "mimetype": "text/x-python",
221 | "name": "python",
222 | "nbconvert_exporter": "python",
223 | "pygments_lexer": "ipython3",
224 | "version": "3.10.9"
225 | }
226 | },
227 | "nbformat": 4,
228 | "nbformat_minor": 4
229 | }
230 |
--------------------------------------------------------------------------------
/ipython-notebook/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation. All rights reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/ipython-notebook/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - python
5 | products:
6 | - azure-cognitive-services
7 | ---
8 |
9 | # Detect and visualize anomalies in your data with the Anomaly Detector API
10 |
11 | 
12 |
13 | These Python notebooks show you how to start detecting anomalies in your data with the Anomaly Detector API, and visualizing the information returned by it.
14 |
15 | ## Contents
16 |
17 | | File/folder | Description |
18 | |-------------|-------------|
19 | | `API Sample` | Jupyter notebooks of how to use the Anomaly Detector APIs. |
20 | | `SDK Sample` | Jupyter notebooks of how to use the Anomaly Detector SDKs. |
21 | | `media` | Images used in the notebooks. |
22 | | `README.md` | This README file. |
23 | | `LICENSE.md` | The license for the sample. |
24 |
25 | ## Prerequisites
26 |
27 | - [Create an Anomaly Detector resource](https://ms.portal.azure.com/#create/Microsoft.CognitiveServicesAnomalyDetector) with access to the Anomaly Detector API. Before continuing, you will need the endpoint and key of this resource.
28 | ## Running the sample
29 |
30 | You can run this sample locally as a Jupyter notebook.
31 |
32 | 1. Make sure Jupyter Notebook is running.
33 | 2. Navigate to the Jupyter Notebooks for this sample, and click on one.
34 | 3. Add your valid Anomaly Detector API key to the `apikey` variable.
35 | 4. Click **Kernel**, then **Restart & Run All** to run the notebook.
36 |
37 | ## Key concepts
38 |
39 | The Anomaly Detector API lets you monitor and detect abnormalities in your time series data without previous experience in machine learning. The API adapts by automatically identifying and applying the best-fitting statistical models to your data, regardless of industry, scenario, or data volume. These Python notebooks cover the following examples.
40 |
41 | |Example |Description |
42 | |---------|---------|
43 | | Latest-point anomaly detection | Use previously seen data points to determine if the latest one in the data set is an anomaly. This example simulates using the Anomaly Detector API on streaming data by iterating over the data set and sending API requests at predetermined positions. By calling the API with each new data point you generate, you can monitor your data as it's created. |
44 | |Batch anomaly detection | Use a time series data set to detect any anomalies that might exist as a batch. This example sends example data sets in a single Anomaly Detector API request. |
45 |
46 | ## Next steps
47 |
48 | For more information, see the [Anomaly Detector API documentation](https://aka.ms/anomaly-detector-documentation).
49 | Need support? [Join the Anomaly Detector Community](https://forms.office.com/pages/responsepage.aspx?id=v4j5cvGGr0GRqy180BHbR2Ci-wb6-iNDoBoNxrnEk9VURjNXUU1VREpOT0U1UEdURkc0OVRLSkZBNC4u).
50 |
--------------------------------------------------------------------------------
/ipython-notebook/media/How to generate a SAS.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/ipython-notebook/media/How to generate a SAS.mp4
--------------------------------------------------------------------------------
/ipython-notebook/media/anomaly_detection1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/ipython-notebook/media/anomaly_detection1.png
--------------------------------------------------------------------------------
/ipython-notebook/media/anomaly_detection2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/ipython-notebook/media/anomaly_detection2.png
--------------------------------------------------------------------------------
/ipython-notebook/media/data-overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/ipython-notebook/media/data-overview.png
--------------------------------------------------------------------------------
/ipython-notebook/media/endpoint_key.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/ipython-notebook/media/endpoint_key.png
--------------------------------------------------------------------------------
/ipython-notebook/media/inference-result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/ipython-notebook/media/inference-result.png
--------------------------------------------------------------------------------
/media/cognitive-services-get-access-keys.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/media/cognitive-services-get-access-keys.png
--------------------------------------------------------------------------------
/postman-demo/README.md:
--------------------------------------------------------------------------------
1 | # How to run MVAD (v1.1) in Postman
2 |
3 | ### API Overview
4 | There are 7 APIs provided in Multivariate Anomaly Detection:
5 | * **Training**: Use `Train Model API` to create and train a model, then use `Get Model Status API` to get the status and model metadata.
6 | * **Inference**:
7 | * Use the `Async Inference API` to trigger an asynchronous inference process, then use the `Get Inference results API` to get detection results on a batch of data.
8 | * You can also use the `Sync Inference API` to trigger detection on a single timestamp at a time.
9 | * **Other operations**: `List Model API` and `Delete Model API` are supported in MVAD for model management.
10 | 
11 |
12 |
13 |
14 | |API Name| Method | Path | Description |
15 | | ------ | ---- | ----------- | ------ |
16 | |**Train Model**| POST | `{endpoint}`/anomalydetector/v1.1/multivariate/models | Create and train a model |
17 | |**Get Model Status**| GET | `{endpoint}`/anomalydetector/v1.1/multivariate/models/`{modelId}` | Get model status and model metadata with `modelId` |
18 | |**Async Inference**| POST | `{endpoint}`/anomalydetector/v1.1/multivariate/models/`{modelId}`:detect-batch | Trigger an asynchronous inference with `modelId` |
19 | |**Get Inference Results**| GET | `{endpoint}`/anomalydetector/v1.1/multivariate/detect-batch/`{resultId}` | Get asynchronous inference results with `resultId` |
20 | |**Sync Inference**| POST | `{endpoint}`/anomalydetector/v1.1/multivariate/models/`{modelId}`:detect-last | Trigger a synchronous inference with `modelId` |
21 | |**List Model**| GET | `{endpoint}`/anomalydetector/v1.1/multivariate/models | List all models |
22 | |**Delete Model**| DELETE | `{endpoint}`/anomalydetector/v1.1/multivariate/models/`{modelId}` | Delete model with `modelId` |
23 |
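24 | If you would rather script these calls than click through Postman, the sketch below shows the train-and-poll portion of the flow with Python's `requests`. It is illustrative only: `endpoint`, `key`, and `data_source` are placeholders, and the training body shows a minimal set of fields (`dataSource`, `dataSchema`, the time window, `slidingWindow`) rather than the full v1.1 request schema.
25 |
26 | ```python
27 | import time
28 | import requests
29 |
30 | endpoint = "https://<your-resource>.cognitiveservices.azure.com"  # placeholder
31 | key = "<your-anomaly-detector-key>"                               # placeholder
32 | data_source = "<SAS URL to your training data>"                   # placeholder
33 | headers = {"Ocp-Apim-Subscription-Key": key}
34 |
35 | # Train Model: POST {endpoint}/anomalydetector/v1.1/multivariate/models
36 | train = requests.post(
37 |     f"{endpoint}/anomalydetector/v1.1/multivariate/models",
38 |     headers=headers,
39 |     json={
40 |         "dataSource": data_source,
41 |         "dataSchema": "OneTable",  # or "MultiTable" for a folder of CSVs
42 |         "startTime": "2021-01-01T00:00:00Z",
43 |         "endTime": "2021-01-02T12:00:00Z",
44 |         "slidingWindow": 28,
45 |     },
46 | )
47 | train.raise_for_status()
48 | # The modelId of the new model is carried in the Location header.
49 | model_id = train.headers["Location"].rstrip("/").split("/")[-1]
50 |
51 | # Get Model Status: poll until training reaches a terminal state.
52 | while True:
53 |     model = requests.get(
54 |         f"{endpoint}/anomalydetector/v1.1/multivariate/models/{model_id}",
55 |         headers=headers,
56 |     ).json()
57 |     if model["modelInfo"]["status"] in ("READY", "FAILED"):
58 |         break
59 |     time.sleep(10)
60 | ```
61 |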
24 | Please click this button to fork the API collection:
25 | [](https://app.getpostman.com/run-collection/18763802-b90da6d8-0f98-4200-976f-546342abcade?action=collection%2Ffork&collection-url=entityId%3D18763802-b90da6d8-0f98-4200-976f-546342abcade%26entityType%3Dcollection%26workspaceId%3De1370b45-5076-4885-884f-e9a97136ddbc#?env%5BMVAD%5D=W3sia2V5IjoibW9kZWxJZCIsInZhbHVlIjoiIiwiZW5hYmxlZCI6dHJ1ZSwidHlwZSI6ImRlZmF1bHQiLCJzZXNzaW9uVmFsdWUiOiJlNjQxZTJlYy01Mzg5LTExZWQtYTkyMC01MjcyNGM4YTZkZmEiLCJzZXNzaW9uSW5kZXgiOjB9LHsia2V5IjoicmVzdWx0SWQiLCJ2YWx1ZSI6IiIsImVuYWJsZWQiOnRydWUsInR5cGUiOiJkZWZhdWx0Iiwic2Vzc2lvblZhbHVlIjoiOGZkZTAwNDItNTM4YS0xMWVkLTlhNDEtMGUxMGNkOTEwZmZhIiwic2Vzc2lvbkluZGV4IjoxfSx7ImtleSI6Ik9jcC1BcGltLVN1YnNjcmlwdGlvbi1LZXkiLCJ2YWx1ZSI6IiIsImVuYWJsZWQiOnRydWUsInR5cGUiOiJzZWNyZXQiLCJzZXNzaW9uVmFsdWUiOiJjNzNjMGRhMzlhOTA0MjgzODA4ZjBmY2E0Zjc3MTFkOCIsInNlc3Npb25JbmRleCI6Mn0seyJrZXkiOiJlbmRwb2ludCIsInZhbHVlIjoiIiwiZW5hYmxlZCI6dHJ1ZSwidHlwZSI6ImRlZmF1bHQiLCJzZXNzaW9uVmFsdWUiOiJodHRwczovL211bHRpLWFkLXRlc3QtdXNjeC5jb2duaXRpdmVzZXJ2aWNlcy5henVyZS5jb20vIiwic2Vzc2lvbkluZGV4IjozfSx7ImtleSI6ImRhdGFTb3VyY2UiLCJ2YWx1ZSI6IiIsImVuYWJsZWQiOnRydWUsInR5cGUiOiJkZWZhdWx0Iiwic2Vzc2lvblZhbHVlIjoiaHR0cHM6Ly9tdmFkZGF0YXNldC5ibG9iLmNvcmUud2luZG93cy5uZXQvc2FtcGxlLW9uZXRhYmxlL3NhbXBsZV9kYXRhXzVfMzAwMC5jc3YiLCJzZXNzaW9uSW5kZXgiOjR9XQ==)
26 |
27 | 1. Select environment as **MVAD**.
28 | 
29 |
30 |
31 | 2. Select **Environment**, paste your Anomaly Detector `endpoint`, `key`, and dataSource `url` into the **CURRENT VALUE** column, then click **Save** to let the variables take effect.
32 | 
33 |
34 | 3. Select **Collections**, click on the first API - **Create and train a model**, then click **Send**.
35 |
36 | ***Note:** If your data is one CSV file, set the dataSchema to **OneTable**; if your data is multiple CSV files in a folder, set the dataSchema to **MultiTable**.*
37 |
38 | 
39 |
40 | 4. In the response of the first API, copy the modelId and paste it into the `modelId` variable in **Environments**, then click **Save**. Then go to **Collections**, click on the second API - **Get model status**, and click **Send**.
41 | 
42 |
43 | 5. Select the third API - **Batch Detection**, and click **Send**. This API triggers an asynchronous inference task, and you should call the Get batch detection results API several times to get the status and the final results.
44 | 
45 |
46 | 6. In the response of the third API, copy the resultId and paste it into the `resultId` variable in **Environments**, then click **Save**. Then go to **Collections**, click on the fourth API - Get batch detection results, and click **Send**.
47 | 
48 |
49 | 7. For the remaining APIs, click on each one and click **Send** to test its request and response.
50 | 
--------------------------------------------------------------------------------
/sampledata/README.md:
--------------------------------------------------------------------------------
1 | # Sample Data
2 |
3 | This folder contains two subfolders: *univariate* and *multivariate*. You can use them with the sample code.
4 |
5 | For more questions and discussions, join our community through https://aka.ms/adadvisorsjoin
6 |
--------------------------------------------------------------------------------
/sampledata/multivariate/multivariate_sample-engine-simple.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/sampledata/multivariate/multivariate_sample-engine-simple.zip
--------------------------------------------------------------------------------
/sampledata/multivariate/multivariate_sample_data_5_3000.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/sampledata/multivariate/multivariate_sample_data_5_3000.zip
--------------------------------------------------------------------------------
/sampledata/univariate/batch-response.json:
--------------------------------------------------------------------------------
1 | {
2 | "expectedValues": [
3 | 32894418.9615615,
4 | 29707932.244719882,
5 | 22651867.032410353,
6 | 24943247.989138011,
7 | 34098022.110827051,
8 | 33893733.15343374,
9 | 33668289.17375017,
10 | 32807561.144138098,
11 | 29618567.705954053,
12 | 22584659.813523095,
13 | 24383099.07,
14 | 34092130.348607078,
15 | 33930369.335444734,
16 | 33771835.547849737,
17 | 32979718.237272907,
18 | 29873907.686442,
19 | 22898518.640143186,
20 | 25266675.592631694,
21 | 34507880.720261112,
22 | 34392110.769727185,
23 | 34264840.807082534,
24 | 33381054.952848945,
25 | 30169003.690043379,
26 | 23087373.931769907,
27 | 25351908.536010381,
28 | 34479836.305212229,
29 | 34250788.996250749,
30 | 33423011.962283727,
31 | 32615075.763498921,
32 | 29478874.156142138,
33 | 22473094.05331745,
34 | 24813478.313006707,
35 | 34017255.737657338,
36 | 33864058.084144637,
37 | 33577519.658487104,
38 | 32851940.511712912,
39 | 29199901.5,
40 | 22918033.675673913,
41 | 25384135.757495031,
42 | 34746812.760001436,
43 | 34752514.684344508,
44 | 34634850.418293975,
45 | 33883602.629261605,
46 | 30764952.726340119,
47 | 23773788.078124832,
48 | 26128787.792423487,
49 | 35344244.421857625
50 | ],
51 | "isAnomaly": [
52 | false,
53 | false,
54 | false,
55 | true,
56 | false,
57 | false,
58 | false,
59 | false,
60 | false,
61 | false,
62 | false,
63 | false,
64 | false,
65 | false,
66 | false,
67 | false,
68 | false,
69 | false,
70 | true,
71 | false,
72 | false,
73 | true,
74 | true,
75 | true,
76 | true,
77 | true,
78 | false,
79 | false,
80 | true,
81 | true,
82 | true,
83 | true,
84 | true,
85 | false,
86 | false,
87 | true,
88 | false,
89 | false,
90 | false,
91 | false,
92 | false,
93 | false,
94 | false,
95 | false,
96 | true,
97 | false,
98 | false
99 | ],
100 | "isNegativeAnomaly": [
101 | false,
102 | false,
103 | false,
104 | false,
105 | false,
106 | false,
107 | false,
108 | false,
109 | false,
110 | false,
111 | false,
112 | false,
113 | false,
114 | false,
115 | false,
116 | false,
117 | false,
118 | false,
119 | true,
120 | false,
121 | false,
122 | false,
123 | false,
124 | false,
125 | false,
126 | false,
127 | false,
128 | false,
129 | true,
130 | true,
131 | true,
132 | true,
133 | true,
134 | false,
135 | false,
136 | true,
137 | false,
138 | false,
139 | false,
140 | false,
141 | false,
142 | false,
143 | false,
144 | false,
145 | true,
146 | false,
147 | false
148 | ],
149 | "isPositiveAnomaly": [
150 | false,
151 | false,
152 | false,
153 | true,
154 | false,
155 | false,
156 | false,
157 | false,
158 | false,
159 | false,
160 | false,
161 | false,
162 | false,
163 | false,
164 | false,
165 | false,
166 | false,
167 | false,
168 | false,
169 | false,
170 | false,
171 | true,
172 | true,
173 | true,
174 | true,
175 | true,
176 | false,
177 | false,
178 | false,
179 | false,
180 | false,
181 | false,
182 | false,
183 | false,
184 | false,
185 | false,
186 | false,
187 | false,
188 | false,
189 | false,
190 | false,
191 | false,
192 | false,
193 | false,
194 | false,
195 | false,
196 | false
197 | ],
198 | "lowerMargins": [
199 | 328944.189615615,
200 | 297079.32244719879,
201 | 226518.67032410353,
202 | 249432.47989138012,
203 | 340980.22110827052,
204 | 338937.33153433743,
205 | 336682.89173750172,
206 | 328075.611441381,
207 | 296185.67705954053,
208 | 225846.59813523095,
209 | 243830.9907,
210 | 340921.30348607077,
211 | 339303.69335444731,
212 | 337718.3554784974,
213 | 329797.18237272906,
214 | 298739.07686441997,
215 | 228985.18640143186,
216 | 252666.75592631695,
217 | 345078.80720261112,
218 | 343921.10769727186,
219 | 342648.40807082533,
220 | 333810.54952848947,
221 | 301690.03690043377,
222 | 230873.73931769907,
223 | 253519.08536010381,
224 | 344798.36305212229,
225 | 342507.88996250747,
226 | 334230.11962283729,
227 | 326150.75763498922,
228 | 294788.74156142137,
229 | 224730.94053317449,
230 | 248134.78313006705,
231 | 340172.55737657339,
232 | 338640.58084144635,
233 | 335775.19658487104,
234 | 328519.40511712915,
235 | 291999.015,
236 | 229180.33675673913,
237 | 253841.35757495032,
238 | 347468.12760001438,
239 | 347525.14684344508,
240 | 346348.50418293977,
241 | 338836.02629261604,
242 | 307649.52726340119,
243 | 237737.88078124833,
244 | 261287.87792423487,
245 | 353442.44421857625
246 | ],
247 | "period": 7,
248 | "upperMargins": [
249 | 328944.189615615,
250 | 297079.32244719879,
251 | 226518.67032410353,
252 | 249432.47989138012,
253 | 340980.22110827052,
254 | 338937.33153433743,
255 | 336682.89173750172,
256 | 328075.611441381,
257 | 296185.67705954053,
258 | 225846.59813523095,
259 | 243830.9907,
260 | 340921.30348607077,
261 | 339303.69335444731,
262 | 337718.3554784974,
263 | 329797.18237272906,
264 | 298739.07686441997,
265 | 228985.18640143186,
266 | 252666.75592631695,
267 | 345078.80720261112,
268 | 343921.10769727186,
269 | 342648.40807082533,
270 | 333810.54952848947,
271 | 301690.03690043377,
272 | 230873.73931769907,
273 | 253519.08536010381,
274 | 344798.36305212229,
275 | 342507.88996250747,
276 | 334230.11962283729,
277 | 326150.75763498922,
278 | 294788.74156142137,
279 | 224730.94053317449,
280 | 248134.78313006705,
281 | 340172.55737657339,
282 | 338640.58084144635,
283 | 335775.19658487104,
284 | 328519.40511712915,
285 | 291999.015,
286 | 229180.33675673913,
287 | 253841.35757495032,
288 | 347468.12760001438,
289 | 347525.14684344508,
290 | 346348.50418293977,
291 | 338836.02629261604,
292 | 307649.52726340119,
293 | 237737.88078124833,
294 | 261287.87792423487,
295 | 353442.44421857625
296 | ]
297 | }
--------------------------------------------------------------------------------
/sampledata/univariate/change-point-sample-seasonality.json:
--------------------------------------------------------------------------------
1 | {
2 | "series": [
3 | {
4 | "value": 66233,
5 | "timestamp": "2019-01-01T00:00:00Z"
6 | },
7 | {
8 | "value": 65782,
9 | "timestamp": "2019-01-02T00:00:00Z"
10 | },
11 | {
12 | "value": 56979,
13 | "timestamp": "2019-01-03T00:00:00Z"
14 | },
15 | {
16 | "value": 56589,
17 | "timestamp": "2019-01-04T00:00:00Z"
18 | },
19 | {
20 | "value": 65976,
21 | "timestamp": "2019-01-05T00:00:00Z"
22 | },
23 | {
24 | "value": 65149,
25 | "timestamp": "2019-01-06T00:00:00Z"
26 | },
27 | {
28 | "value": 65369,
29 | "timestamp": "2019-01-07T00:00:00Z"
30 | },
31 | {
32 | "value": 64193,
33 | "timestamp": "2019-01-08T00:00:00Z"
34 | },
35 | {
36 | "value": 63993,
37 | "timestamp": "2019-01-09T00:00:00Z"
38 | },
39 | {
40 | "value": 53550,
41 | "timestamp": "2019-01-10T00:00:00Z"
42 | },
43 | {
44 | "value": 53680,
45 | "timestamp": "2019-01-11T00:00:00Z"
46 | },
47 | {
48 | "value": 63150,
49 | "timestamp": "2019-01-12T00:00:00Z"
50 | },
51 | {
52 | "value": 62124,
53 | "timestamp": "2019-01-13T00:00:00Z"
54 | },
55 | {
56 | "value": 62307,
57 | "timestamp": "2019-01-14T00:00:00Z"
58 | },
59 | {
60 | "value": 60664,
61 | "timestamp": "2019-01-15T00:00:00Z"
62 | },
63 | {
64 | "value": 59835,
65 | "timestamp": "2019-01-16T00:00:00Z"
66 | },
67 | {
68 | "value": 51557,
69 | "timestamp": "2019-01-17T00:00:00Z"
70 | },
71 | {
72 | "value": 51100,
73 | "timestamp": "2019-01-18T00:00:00Z"
74 | },
75 | {
76 | "value": 59663,
77 | "timestamp": "2019-01-19T00:00:00Z"
78 | },
79 | {
80 | "value": 59759,
81 | "timestamp": "2019-01-20T00:00:00Z"
82 | },
83 | {
84 | "value": 59842,
85 | "timestamp": "2019-01-21T00:00:00Z"
86 | },
87 | {
88 | "value": 57387,
89 | "timestamp": "2019-01-22T00:00:00Z"
90 | },
91 | {
92 | "value": 55149,
93 | "timestamp": "2019-01-23T00:00:00Z"
94 | },
95 | {
96 | "value": 46122,
97 | "timestamp": "2019-01-24T00:00:00Z"
98 | },
99 | {
100 | "value": 46388,
101 | "timestamp": "2019-01-25T00:00:00Z"
102 | },
103 | {
104 | "value": 54713,
105 | "timestamp": "2019-01-26T00:00:00Z"
106 | },
107 | {
108 | "value": 54573,
109 | "timestamp": "2019-01-27T00:00:00Z"
110 | },
111 | {
112 | "value": 54745,
113 | "timestamp": "2019-01-28T00:00:00Z"
114 | },
115 | {
116 | "value": 51394,
117 | "timestamp": "2019-01-29T00:00:00Z"
118 | },
119 | {
120 | "value": 49284,
121 | "timestamp": "2019-01-30T00:00:00Z"
122 | },
123 | {
124 | "value": 37627,
125 | "timestamp": "2019-01-31T00:00:00Z"
126 | },
127 | {
128 | "value": 35431,
129 | "timestamp": "2019-02-01T00:00:00Z"
130 | },
131 | {
132 | "value": 42798,
133 | "timestamp": "2019-02-02T00:00:00Z"
134 | },
135 | {
136 | "value": 39180,
137 | "timestamp": "2019-02-03T00:00:00Z"
138 | },
139 | {
140 | "value": 36612,
141 | "timestamp": "2019-02-04T00:00:00Z"
142 | },
143 | {
144 | "value": 36504,
145 | "timestamp": "2019-02-05T00:00:00Z"
146 | },
147 | {
148 | "value": 36403,
149 | "timestamp": "2019-02-06T00:00:00Z"
150 | },
151 | {
152 | "value": 28743,
153 | "timestamp": "2019-02-07T00:00:00Z"
154 | },
155 | {
156 | "value": 28119,
157 | "timestamp": "2019-02-08T00:00:00Z"
158 | },
159 | {
160 | "value": 34786,
161 | "timestamp": "2019-02-09T00:00:00Z"
162 | },
163 | {
164 | "value": 36614,
165 | "timestamp": "2019-02-10T00:00:00Z"
166 | },
167 | {
168 | "value": 36939,
169 | "timestamp": "2019-02-11T00:00:00Z"
170 | },
171 | {
172 | "value": 35954,
173 | "timestamp": "2019-02-12T00:00:00Z"
174 | },
175 | {
176 | "value": 35603,
177 | "timestamp": "2019-02-13T00:00:00Z"
178 | },
179 | {
180 | "value": 27763,
181 | "timestamp": "2019-02-14T00:00:00Z"
182 | },
183 | {
184 | "value": 27420,
185 | "timestamp": "2019-02-15T00:00:00Z"
186 | },
187 | {
188 | "value": 36020,
189 | "timestamp": "2019-02-16T00:00:00Z"
190 | },
191 | {
192 | "value": 35656,
193 | "timestamp": "2019-02-17T00:00:00Z"
194 | },
195 | {
196 | "value": 34805,
197 | "timestamp": "2019-02-18T00:00:00Z"
198 | },
199 | {
200 | "value": 35116,
201 | "timestamp": "2019-02-19T00:00:00Z"
202 | },
203 | {
204 | "value": 33971,
205 | "timestamp": "2019-02-20T00:00:00Z"
206 | },
207 | {
208 | "value": 26169,
209 | "timestamp": "2019-02-21T00:00:00Z"
210 | },
211 | {
212 | "value": 24058,
213 | "timestamp": "2019-02-22T00:00:00Z"
214 | },
215 | {
216 | "value": 32373,
217 | "timestamp": "2019-02-23T00:00:00Z"
218 | },
219 | {
220 | "value": 32830,
221 | "timestamp": "2019-02-24T00:00:00Z"
222 | },
223 | {
224 | "value": 32388,
225 | "timestamp": "2019-02-25T00:00:00Z"
226 | },
227 | {
228 | "value": 31408,
229 | "timestamp": "2019-02-26T00:00:00Z"
230 | },
231 | {
232 | "value": 30508,
233 | "timestamp": "2019-02-27T00:00:00Z"
234 | },
235 | {
236 | "value": 23228,
237 | "timestamp": "2019-02-28T00:00:00Z"
238 | },
239 | {
240 | "value": 22991,
241 | "timestamp": "2019-03-01T00:00:00Z"
242 | },
243 | {
244 | "value": 30923,
245 | "timestamp": "2019-03-02T00:00:00Z"
246 | },
247 | {
248 | "value": 31206,
249 | "timestamp": "2019-03-03T00:00:00Z"
250 | },
251 | {
252 | "value": 31898,
253 | "timestamp": "2019-03-04T00:00:00Z"
254 | },
255 | {
256 | "value": 32257,
257 | "timestamp": "2019-03-05T00:00:00Z"
258 | },
259 | {
260 | "value": 31244,
261 | "timestamp": "2019-03-06T00:00:00Z"
262 | },
263 | {
264 | "value": 23747,
265 | "timestamp": "2019-03-07T00:00:00Z"
266 | },
267 | {
268 | "value": 23380,
269 | "timestamp": "2019-03-08T00:00:00Z"
270 | },
271 | {
272 | "value": 31213,
273 | "timestamp": "2019-03-09T00:00:00Z"
274 | },
275 | {
276 | "value": 30982,
277 | "timestamp": "2019-03-10T00:00:00Z"
278 | },
279 | {
280 | "value": 30370,
281 | "timestamp": "2019-03-11T00:00:00Z"
282 | },
283 | {
284 | "value": 30149,
285 | "timestamp": "2019-03-12T00:00:00Z"
286 | },
287 | {
288 | "value": 28708,
289 | "timestamp": "2019-03-13T00:00:00Z"
290 | },
291 | {
292 | "value": 22168,
293 | "timestamp": "2019-03-14T00:00:00Z"
294 | },
295 | {
296 | "value": 22356,
297 | "timestamp": "2019-03-15T00:00:00Z"
298 | },
299 | {
300 | "value": 30004,
301 | "timestamp": "2019-03-16T00:00:00Z"
302 | },
303 | {
304 | "value": 30289,
305 | "timestamp": "2019-03-17T00:00:00Z"
306 | },
307 | {
308 | "value": 29623,
309 | "timestamp": "2019-03-18T00:00:00Z"
310 | },
311 | {
312 | "value": 29216,
313 | "timestamp": "2019-03-19T00:00:00Z"
314 | },
315 | {
316 | "value": 28696,
317 | "timestamp": "2019-03-20T00:00:00Z"
318 | },
319 | {
320 | "value": 21626,
321 | "timestamp": "2019-03-21T00:00:00Z"
322 | },
323 | {
324 | "value": 20713,
325 | "timestamp": "2019-03-22T00:00:00Z"
326 | },
327 | {
328 | "value": 27757,
329 | "timestamp": "2019-03-23T00:00:00Z"
330 | },
331 | {
332 | "value": 28056,
333 | "timestamp": "2019-03-24T00:00:00Z"
334 | },
335 | {
336 | "value": 27490,
337 | "timestamp": "2019-03-25T00:00:00Z"
338 | },
339 | {
340 | "value": 27170,
341 | "timestamp": "2019-03-26T00:00:00Z"
342 | },
343 | {
344 | "value": 26176,
345 | "timestamp": "2019-03-27T00:00:00Z"
346 | },
347 | {
348 | "value": 20272,
349 | "timestamp": "2019-03-28T00:00:00Z"
350 | },
351 | {
352 | "value": 21499,
353 | "timestamp": "2019-03-29T00:00:00Z"
354 | },
355 | {
356 | "value": 23920,
357 | "timestamp": "2019-03-30T00:00:00Z"
358 | },
359 | {
360 | "value": 23346,
361 | "timestamp": "2019-03-31T00:00:00Z"
362 | },
363 | {
364 | "value": 24633,
365 | "timestamp": "2019-04-01T00:00:00Z"
366 | },
367 | {
368 | "value": 24624,
369 | "timestamp": "2019-04-02T00:00:00Z"
370 | },
371 | {
372 | "value": 24037,
373 | "timestamp": "2019-04-03T00:00:00Z"
374 | },
375 | {
376 | "value": 19290,
377 | "timestamp": "2019-04-04T00:00:00Z"
378 | },
379 | {
380 | "value": 25493,
381 | "timestamp": "2019-04-05T00:00:00Z"
382 | },
383 | {
384 | "value": 22500,
385 | "timestamp": "2019-04-06T00:00:00Z"
386 | },
387 | {
388 | "value": 23356,
389 | "timestamp": "2019-04-07T00:00:00Z"
390 | },
391 | {
392 | "value": 24842,
393 | "timestamp": "2019-04-08T00:00:00Z"
394 | },
395 | {
396 | "value": 24885,
397 | "timestamp": "2019-04-09T00:00:00Z"
398 | },
399 | {
400 | "value": 23994,
401 | "timestamp": "2019-04-10T00:00:00Z"
402 | },
403 | {
404 | "value": 19728,
405 | "timestamp": "2019-04-11T00:00:00Z"
406 | },
407 | {
408 | "value": 20052,
409 | "timestamp": "2019-04-12T00:00:00Z"
410 | },
411 | {
412 | "value": 24655,
413 | "timestamp": "2019-04-13T00:00:00Z"
414 | },
415 | {
416 | "value": 26187,
417 | "timestamp": "2019-04-14T00:00:00Z"
418 | },
419 | {
420 | "value": 26352,
421 | "timestamp": "2019-04-15T00:00:00Z"
422 | },
423 | {
424 | "value": 25927,
425 | "timestamp": "2019-04-16T00:00:00Z"
426 | },
427 | {
428 | "value": 25874,
429 | "timestamp": "2019-04-17T00:00:00Z"
430 | },
431 | {
432 | "value": 19819,
433 | "timestamp": "2019-04-18T00:00:00Z"
434 | },
435 | {
436 | "value": 18881,
437 | "timestamp": "2019-04-19T00:00:00Z"
438 | },
439 | {
440 | "value": 25465,
441 | "timestamp": "2019-04-20T00:00:00Z"
442 | },
443 | {
444 | "value": 26000,
445 | "timestamp": "2019-04-21T00:00:00Z"
446 | },
447 | {
448 | "value": 25886,
449 | "timestamp": "2019-04-22T00:00:00Z"
450 | },
451 | {
452 | "value": 26032,
453 | "timestamp": "2019-04-23T00:00:00Z"
454 | },
455 | {
456 | "value": 25819,
457 | "timestamp": "2019-04-24T00:00:00Z"
458 | },
459 | {
460 | "value": 19254,
461 | "timestamp": "2019-04-25T00:00:00Z"
462 | },
463 | {
464 | "value": 18665,
465 | "timestamp": "2019-04-26T00:00:00Z"
466 | }
467 | ],
468 | "granularity": "daily"
469 | }
--------------------------------------------------------------------------------
/sampledata/univariate/change-point-sample.json:
--------------------------------------------------------------------------------
1 | {
2 | "series": [
3 | {
4 | "value": 116168307,
5 | "timestamp": "2019-01-01T00:00:00Z"
6 | },
7 | {
8 | "value": 116195090,
9 | "timestamp": "2019-01-02T00:00:00Z"
10 | },
11 | {
12 | "value": 116219292,
13 | "timestamp": "2019-01-03T00:00:00Z"
14 | },
15 | {
16 | "value": 116218498,
17 | "timestamp": "2019-01-04T00:00:00Z"
18 | },
19 | {
20 | "value": 116217643,
21 | "timestamp": "2019-01-05T00:00:00Z"
22 | },
23 | {
24 | "value": 116234219,
25 | "timestamp": "2019-01-06T00:00:00Z"
26 | },
27 | {
28 | "value": 116291400,
29 | "timestamp": "2019-01-07T00:00:00Z"
30 | },
31 | {
32 | "value": 116326509,
33 | "timestamp": "2019-01-08T00:00:00Z"
34 | },
35 | {
36 | "value": 116323167,
37 | "timestamp": "2019-01-09T00:00:00Z"
38 | },
39 | {
40 | "value": 116360790,
41 | "timestamp": "2019-01-10T00:00:00Z"
42 | },
43 | {
44 | "value": 116367491,
45 | "timestamp": "2019-01-11T00:00:00Z"
46 | },
47 | {
48 | "value": 116371082,
49 | "timestamp": "2019-01-12T00:00:00Z"
50 | },
51 | {
52 | "value": 116380405,
53 | "timestamp": "2019-01-13T00:00:00Z"
54 | },
55 | {
56 | "value": 116393919,
57 | "timestamp": "2019-01-14T00:00:00Z"
58 | },
59 | {
60 | "value": 116443750,
61 | "timestamp": "2019-01-15T00:00:00Z"
62 | },
63 | {
64 | "value": 116467267,
65 | "timestamp": "2019-01-16T00:00:00Z"
66 | },
67 | {
68 | "value": 116497910,
69 | "timestamp": "2019-01-17T00:00:00Z"
70 | },
71 | {
72 | "value": 116499861,
73 | "timestamp": "2019-01-18T00:00:00Z"
74 | },
75 | {
76 | "value": 116500538,
77 | "timestamp": "2019-01-19T00:00:00Z"
78 | },
79 | {
80 | "value": 116532052,
81 | "timestamp": "2019-01-20T00:00:00Z"
82 | },
83 | {
84 | "value": 116559282,
85 | "timestamp": "2019-01-21T00:00:00Z"
86 | },
87 | {
88 | "value": 116597249,
89 | "timestamp": "2019-01-22T00:00:00Z"
90 | },
91 | {
92 | "value": 118036892,
93 | "timestamp": "2019-01-23T00:00:00Z"
94 | },
95 | {
96 | "value": 118090207,
97 | "timestamp": "2019-01-24T00:00:00Z"
98 | },
99 | {
100 | "value": 118105517,
101 | "timestamp": "2019-01-25T00:00:00Z"
102 | },
103 | {
104 | "value": 118107624,
105 | "timestamp": "2019-01-26T00:00:00Z"
106 | },
107 | {
108 | "value": 118138073,
109 | "timestamp": "2019-01-27T00:00:00Z"
110 | },
111 | {
112 | "value": 118164752,
113 | "timestamp": "2019-01-28T00:00:00Z"
114 | },
115 | {
116 | "value": 118150854,
117 | "timestamp": "2019-01-29T00:00:00Z"
118 | },
119 | {
120 | "value": 118168111,
121 | "timestamp": "2019-01-30T00:00:00Z"
122 | },
123 | {
124 | "value": 118281715,
125 | "timestamp": "2019-01-31T00:00:00Z"
126 | },
127 | {
128 | "value": 118255480,
129 | "timestamp": "2019-02-01T00:00:00Z"
130 | },
131 | {
132 | "value": 118256700,
133 | "timestamp": "2019-02-02T00:00:00Z"
134 | },
135 | {
136 | "value": 118256692,
137 | "timestamp": "2019-02-03T00:00:00Z"
138 | },
139 | {
140 | "value": 118261555,
141 | "timestamp": "2019-02-04T00:00:00Z"
142 | },
143 | {
144 | "value": 118271556,
145 | "timestamp": "2019-02-05T00:00:00Z"
146 | },
147 | {
148 | "value": 118304847,
149 | "timestamp": "2019-02-06T00:00:00Z"
150 | },
151 | {
152 | "value": 119575122,
153 | "timestamp": "2019-02-07T00:00:00Z"
154 | },
155 | {
156 | "value": 119575288,
157 | "timestamp": "2019-02-08T00:00:00Z"
158 | },
159 | {
160 | "value": 119577225,
161 | "timestamp": "2019-02-09T00:00:00Z"
162 | },
163 | {
164 | "value": 119687273,
165 | "timestamp": "2019-02-10T00:00:00Z"
166 | },
167 | {
168 | "value": 119696443,
169 | "timestamp": "2019-02-11T00:00:00Z"
170 | },
171 | {
172 | "value": 119708919,
173 | "timestamp": "2019-02-12T00:00:00Z"
174 | },
175 | {
176 | "value": 119742399,
177 | "timestamp": "2019-02-13T00:00:00Z"
178 | },
179 | {
180 | "value": 119783758,
181 | "timestamp": "2019-02-14T00:00:00Z"
182 | },
183 | {
184 | "value": 119778552,
185 | "timestamp": "2019-02-15T00:00:00Z"
186 | },
187 | {
188 | "value": 119777165,
189 | "timestamp": "2019-02-16T00:00:00Z"
190 | },
191 | {
192 | "value": 119839611,
193 | "timestamp": "2019-02-17T00:00:00Z"
194 | },
195 | {
196 | "value": 118478044,
197 | "timestamp": "2019-02-18T00:00:00Z"
198 | },
199 | {
200 | "value": 118510659,
201 | "timestamp": "2019-02-19T00:00:00Z"
202 | },
203 | {
204 | "value": 118536890,
205 | "timestamp": "2019-02-20T00:00:00Z"
206 | },
207 | {
208 | "value": 120377808,
209 | "timestamp": "2019-02-21T00:00:00Z"
210 | },
211 | {
212 | "value": 120379137,
213 | "timestamp": "2019-02-22T00:00:00Z"
214 | },
215 | {
216 | "value": 120380093,
217 | "timestamp": "2019-02-23T00:00:00Z"
218 | },
219 | {
220 | "value": 120409909,
221 | "timestamp": "2019-02-24T00:00:00Z"
222 | },
223 | {
224 | "value": 120481097,
225 | "timestamp": "2019-02-25T00:00:00Z"
226 | },
227 | {
228 | "value": 120525030,
229 | "timestamp": "2019-02-26T00:00:00Z"
230 | },
231 | {
232 | "value": 120554993,
233 | "timestamp": "2019-02-27T00:00:00Z"
234 | },
235 | {
236 | "value": 120396587,
237 | "timestamp": "2019-02-28T00:00:00Z"
238 | },
239 | {
240 | "value": 120389070,
241 | "timestamp": "2019-03-01T00:00:00Z"
242 | }
243 | ],
244 | "granularity": "daily",
245 | "stableTrendWindow": 5,
246 | "threshold": 0.9
247 | }
--------------------------------------------------------------------------------
/sampledata/univariate/latest-point-response.json:
--------------------------------------------------------------------------------
1 | {
2 | "expectedValue": 35344244.421857625,
3 | "isAnomaly": false,
4 | "isNegativeAnomaly": false,
5 | "isPositiveAnomaly": false,
6 | "lowerMargin": 353442.44421857625,
7 | "period": 7,
8 | "suggestedWindow": 29,
9 | "upperMargin": 353442.44421857625
10 | }
--------------------------------------------------------------------------------
/sampledata/univariate/request-data.csv:
--------------------------------------------------------------------------------
1 | 2018-03-01T00:00:00Z,32858923
2 | 2018-03-02T00:00:00Z,29615278
3 | 2018-03-03T00:00:00Z,22839355
4 | 2018-03-04T00:00:00Z,25948736
5 | 2018-03-05T00:00:00Z,34139159
6 | 2018-03-06T00:00:00Z,33843985
7 | 2018-03-07T00:00:00Z,33637661
8 | 2018-03-08T00:00:00Z,32627350
9 | 2018-03-09T00:00:00Z,29881076
10 | 2018-03-10T00:00:00Z,22681575
11 | 2018-03-11T00:00:00Z,24629393
12 | 2018-03-12T00:00:00Z,34010679
13 | 2018-03-13T00:00:00Z,33893888
14 | 2018-03-14T00:00:00Z,33760076
15 | 2018-03-15T00:00:00Z,33093515
16 | 2018-03-16T00:00:00Z,29945555
17 | 2018-03-17T00:00:00Z,22676212
18 | 2018-03-18T00:00:00Z,25262514
19 | 2018-03-19T00:00:00Z,33631649
20 | 2018-03-20T00:00:00Z,34468310
21 | 2018-03-21T00:00:00Z,34212281
22 | 2018-03-22T00:00:00Z,38144434
23 | 2018-03-23T00:00:00Z,34662949
24 | 2018-03-24T00:00:00Z,24623684
25 | 2018-03-25T00:00:00Z,26530491
26 | 2018-03-26T00:00:00Z,35445003
27 | 2018-03-27T00:00:00Z,34250789
28 | 2018-03-28T00:00:00Z,33423012
29 | 2018-03-29T00:00:00Z,30744783
30 | 2018-03-30T00:00:00Z,25825128
31 | 2018-03-31T00:00:00Z,21244209
32 | 2018-04-01T00:00:00Z,22576956
33 | 2018-04-02T00:00:00Z,31957221
34 | 2018-04-03T00:00:00Z,33841228
35 | 2018-04-04T00:00:00Z,33554483
36 | 2018-04-05T00:00:00Z,32383350
37 | 2018-04-06T00:00:00Z,29494850
38 | 2018-04-07T00:00:00Z,22815534
39 | 2018-04-08T00:00:00Z,25557267
40 | 2018-04-09T00:00:00Z,34858252
41 | 2018-04-10T00:00:00Z,34750597
42 | 2018-04-11T00:00:00Z,34717956
43 | 2018-04-12T00:00:00Z,34132534
44 | 2018-04-13T00:00:00Z,30762236
45 | 2018-04-14T00:00:00Z,22504059
46 | 2018-04-15T00:00:00Z,26149060
47 | 2018-04-16T00:00:00Z,35250105
--------------------------------------------------------------------------------
/sampledata/univariate/request-data.json:
--------------------------------------------------------------------------------
1 | {
2 | "granularity": "daily",
3 | "series": [
4 | {
5 | "timestamp": "2018-03-01T00:00:00Z",
6 | "value": 32858923
7 | },
8 | {
9 | "timestamp": "2018-03-02T00:00:00Z",
10 | "value": 29615278
11 | },
12 | {
13 | "timestamp": "2018-03-03T00:00:00Z",
14 | "value": 22839355
15 | },
16 | {
17 | "timestamp": "2018-03-04T00:00:00Z",
18 | "value": 25948736
19 | },
20 | {
21 | "timestamp": "2018-03-05T00:00:00Z",
22 | "value": 34139159
23 | },
24 | {
25 | "timestamp": "2018-03-06T00:00:00Z",
26 | "value": 33843985
27 | },
28 | {
29 | "timestamp": "2018-03-07T00:00:00Z",
30 | "value": 33637661
31 | },
32 | {
33 | "timestamp": "2018-03-08T00:00:00Z",
34 | "value": 32627350
35 | },
36 | {
37 | "timestamp": "2018-03-09T00:00:00Z",
38 | "value": 29881076
39 | },
40 | {
41 | "timestamp": "2018-03-10T00:00:00Z",
42 | "value": 22681575
43 | },
44 | {
45 | "timestamp": "2018-03-11T00:00:00Z",
46 | "value": 24629393
47 | },
48 | {
49 | "timestamp": "2018-03-12T00:00:00Z",
50 | "value": 34010679
51 | },
52 | {
53 | "timestamp": "2018-03-13T00:00:00Z",
54 | "value": 33893888
55 | },
56 | {
57 | "timestamp": "2018-03-14T00:00:00Z",
58 | "value": 33760076
59 | },
60 | {
61 | "timestamp": "2018-03-15T00:00:00Z",
62 | "value": 33093515
63 | },
64 | {
65 | "timestamp": "2018-03-16T00:00:00Z",
66 | "value": 29945555
67 | },
68 | {
69 | "timestamp": "2018-03-17T00:00:00Z",
70 | "value": 22676212
71 | },
72 | {
73 | "timestamp": "2018-03-18T00:00:00Z",
74 | "value": 25262514
75 | },
76 | {
77 | "timestamp": "2018-03-19T00:00:00Z",
78 | "value": 33631649
79 | },
80 | {
81 | "timestamp": "2018-03-20T00:00:00Z",
82 | "value": 34468310
83 | },
84 | {
85 | "timestamp": "2018-03-21T00:00:00Z",
86 | "value": 34212281
87 | },
88 | {
89 | "timestamp": "2018-03-22T00:00:00Z",
90 | "value": 38144434
91 | },
92 | {
93 | "timestamp": "2018-03-23T00:00:00Z",
94 | "value": 34662949
95 | },
96 | {
97 | "timestamp": "2018-03-24T00:00:00Z",
98 | "value": 24623684
99 | },
100 | {
101 | "timestamp": "2018-03-25T00:00:00Z",
102 | "value": 26530491
103 | },
104 | {
105 | "timestamp": "2018-03-26T00:00:00Z",
106 | "value": 35445003
107 | },
108 | {
109 | "timestamp": "2018-03-27T00:00:00Z",
110 | "value": 34250789
111 | },
112 | {
113 | "timestamp": "2018-03-28T00:00:00Z",
114 | "value": 33423012
115 | },
116 | {
117 | "timestamp": "2018-03-29T00:00:00Z",
118 | "value": 30744783
119 | },
120 | {
121 | "timestamp": "2018-03-30T00:00:00Z",
122 | "value": 25825128
123 | },
124 | {
125 | "timestamp": "2018-03-31T00:00:00Z",
126 | "value": 21244209
127 | },
128 | {
129 | "timestamp": "2018-04-01T00:00:00Z",
130 | "value": 22576956
131 | },
132 | {
133 | "timestamp": "2018-04-02T00:00:00Z",
134 | "value": 31957221
135 | },
136 | {
137 | "timestamp": "2018-04-03T00:00:00Z",
138 | "value": 33841228
139 | },
140 | {
141 | "timestamp": "2018-04-04T00:00:00Z",
142 | "value": 33554483
143 | },
144 | {
145 | "timestamp": "2018-04-05T00:00:00Z",
146 | "value": 32383350
147 | },
148 | {
149 | "timestamp": "2018-04-06T00:00:00Z",
150 | "value": 29494850
151 | },
152 | {
153 | "timestamp": "2018-04-07T00:00:00Z",
154 | "value": 22815534
155 | },
156 | {
157 | "timestamp": "2018-04-08T00:00:00Z",
158 | "value": 25557267
159 | },
160 | {
161 | "timestamp": "2018-04-09T00:00:00Z",
162 | "value": 34858252
163 | },
164 | {
165 | "timestamp": "2018-04-10T00:00:00Z",
166 | "value": 34750597
167 | },
168 | {
169 | "timestamp": "2018-04-11T00:00:00Z",
170 | "value": 34717956
171 | },
172 | {
173 | "timestamp": "2018-04-12T00:00:00Z",
174 | "value": 34132534
175 | },
176 | {
177 | "timestamp": "2018-04-13T00:00:00Z",
178 | "value": 30762236
179 | },
180 | {
181 | "timestamp": "2018-04-14T00:00:00Z",
182 | "value": 22504059
183 | },
184 | {
185 | "timestamp": "2018-04-15T00:00:00Z",
186 | "value": 26149060
187 | },
188 | {
189 | "timestamp": "2018-04-16T00:00:00Z",
190 | "value": 35250105
191 | }
192 | ]
193 | }
--------------------------------------------------------------------------------
/sampledata/univariate/univariate_sample_daily.json:
--------------------------------------------------------------------------------
1 | {
2 | "period": 7,
3 | "granularity": "daily",
4 | "series": [
5 | {
6 | "timestamp": "2018-03-01T00:00:00Z",
7 | "value": 32858923
8 | },
9 | {
10 | "timestamp": "2018-03-02T00:00:00Z",
11 | "value": 29615278
12 | },
13 | {
14 | "timestamp": "2018-03-03T00:00:00Z",
15 | "value": 22839355
16 | },
17 | {
18 | "timestamp": "2018-03-04T00:00:00Z",
19 | "value": 25948736
20 | },
21 | {
22 | "timestamp": "2018-03-05T00:00:00Z",
23 | "value": 34139159
24 | },
25 | {
26 | "timestamp": "2018-03-06T00:00:00Z",
27 | "value": 33843985
28 | },
29 | {
30 | "timestamp": "2018-03-07T00:00:00Z",
31 | "value": 33637661
32 | },
33 | {
34 | "timestamp": "2018-03-08T00:00:00Z",
35 | "value": 32627350
36 | },
37 | {
38 | "timestamp": "2018-03-09T00:00:00Z",
39 | "value": 29881076
40 | },
41 | {
42 | "timestamp": "2018-03-10T00:00:00Z",
43 | "value": 22681575
44 | },
45 | {
46 | "timestamp": "2018-03-11T00:00:00Z",
47 | "value": 24629393
48 | },
49 | {
50 | "timestamp": "2018-03-12T00:00:00Z",
51 | "value": 34010679
52 | },
53 | {
54 | "timestamp": "2018-03-13T00:00:00Z",
55 | "value": 33893888
56 | },
57 | {
58 | "timestamp": "2018-03-14T00:00:00Z",
59 | "value": 33760076
60 | },
61 | {
62 | "timestamp": "2018-03-15T00:00:00Z",
63 | "value": 33093515
64 | },
65 | {
66 | "timestamp": "2018-03-16T00:00:00Z",
67 | "value": 29945555
68 | },
69 | {
70 | "timestamp": "2018-03-17T00:00:00Z",
71 | "value": 22676212
72 | },
73 | {
74 | "timestamp": "2018-03-18T00:00:00Z",
75 | "value": 25262514
76 | },
77 | {
78 | "timestamp": "2018-03-19T00:00:00Z",
79 | "value": 33631649
80 | },
81 | {
82 | "timestamp": "2018-03-20T00:00:00Z",
83 | "value": 34468310
84 | },
85 | {
86 | "timestamp": "2018-03-21T00:00:00Z",
87 | "value": 34212281
88 | },
89 | {
90 | "timestamp": "2018-03-22T00:00:00Z",
91 | "value": 38144434
92 | },
93 | {
94 | "timestamp": "2018-03-23T00:00:00Z",
95 | "value": 34662949
96 | },
97 | {
98 | "timestamp": "2018-03-24T00:00:00Z",
99 | "value": 24623684
100 | },
101 | {
102 | "timestamp": "2018-03-25T00:00:00Z",
103 | "value": 26530491
104 | },
105 | {
106 | "timestamp": "2018-03-26T00:00:00Z",
107 | "value": 35445003
108 | },
109 | {
110 | "timestamp": "2018-03-27T00:00:00Z",
111 | "value": 34250789
112 | },
113 | {
114 | "timestamp": "2018-03-28T00:00:00Z",
115 | "value": 33423012
116 | },
117 | {
118 | "timestamp": "2018-03-29T00:00:00Z",
119 | "value": 30744783
120 | },
121 | {
122 | "timestamp": "2018-03-30T00:00:00Z",
123 | "value": 25825128
124 | },
125 | {
126 | "timestamp": "2018-03-31T00:00:00Z",
127 | "value": 21244209
128 | },
129 | {
130 | "timestamp": "2018-04-01T00:00:00Z",
131 | "value": 22576956
132 | },
133 | {
134 | "timestamp": "2018-04-02T00:00:00Z",
135 | "value": 31957221
136 | },
137 | {
138 | "timestamp": "2018-04-03T00:00:00Z",
139 | "value": 33841228
140 | },
141 | {
142 | "timestamp": "2018-04-04T00:00:00Z",
143 | "value": 33554483
144 | },
145 | {
146 | "timestamp": "2018-04-05T00:00:00Z",
147 | "value": 32383350
148 | },
149 | {
150 | "timestamp": "2018-04-06T00:00:00Z",
151 | "value": 29494850
152 | },
153 | {
154 | "timestamp": "2018-04-07T00:00:00Z",
155 | "value": 22815534
156 | },
157 | {
158 | "timestamp": "2018-04-08T00:00:00Z",
159 | "value": 25557267
160 | },
161 | {
162 | "timestamp": "2018-04-09T00:00:00Z",
163 | "value": 34858252
164 | },
165 | {
166 | "timestamp": "2018-04-10T00:00:00Z",
167 | "value": 34750597
168 | },
169 | {
170 | "timestamp": "2018-04-11T00:00:00Z",
171 | "value": 34717956
172 | },
173 | {
174 | "timestamp": "2018-04-12T00:00:00Z",
175 | "value": 34132534
176 | },
177 | {
178 | "timestamp": "2018-04-13T00:00:00Z",
179 | "value": 30762236
180 | },
181 | {
182 | "timestamp": "2018-04-14T00:00:00Z",
183 | "value": 22504059
184 | },
185 | {
186 | "timestamp": "2018-04-15T00:00:00Z",
187 | "value": 26149060
188 | },
189 | {
190 | "timestamp": "2018-04-16T00:00:00Z",
191 | "value": 35250105
192 | }
193 | ]
194 | }
--------------------------------------------------------------------------------
/samples-multivariate/MultivariateSample.java:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | package com.azure.ai.anomalydetector;
5 |
6 | import com.azure.ai.anomalydetector.models.*;
7 | import com.azure.core.credential.AzureKeyCredential;
8 | import com.azure.core.http.*;
9 | import com.azure.core.http.policy.*;
10 | import com.azure.core.http.rest.PagedIterable;
11 | import com.azure.core.http.rest.PagedResponse;
12 | import com.azure.core.http.rest.Response;
13 | import com.azure.core.http.rest.StreamResponse;
14 | import com.azure.core.util.Context;
15 | import reactor.core.publisher.Flux;
16 |
17 | import java.io.FileNotFoundException;
18 | import java.io.FileOutputStream;
19 | import java.io.IOException;
20 | import java.io.UncheckedIOException;
21 | import java.nio.ByteBuffer;
22 | import java.nio.file.Files;
23 | import java.nio.file.Path;
24 | import java.nio.file.Paths;
25 | import java.time.*;
26 | import java.time.format.DateTimeFormatter;
27 | import java.util.Iterator;
28 | import java.util.List;
29 | import java.util.UUID;
30 | import java.util.concurrent.TimeUnit;
31 | import java.util.stream.Collectors;
32 |
33 |
34 | public class MultivariateSample {
35 | private static void close(FileOutputStream fos) {
36 | try {
37 | fos.close();
38 | System.out.println("closed");
39 | } catch (IOException e) {
40 | throw new UncheckedIOException(e);
41 | }
42 | }
43 |
44 | private static void write(FileOutputStream fos, ByteBuffer b) {
45 | try {
46 | fos.write(b.array());
47 | } catch (IOException e) {
48 | throw new UncheckedIOException(e);
49 | }
50 | }
51 |
52 | private static AnomalyDetectorClient getClient(String endpoint, String key) {
53 | HttpHeaders headers = new HttpHeaders()
54 | .put("Accept", ContentType.APPLICATION_JSON);
55 |
56 | HttpPipelinePolicy authPolicy = new AzureKeyCredentialPolicy("Ocp-Apim-Subscription-Key",
57 | new AzureKeyCredential(key));
58 | AddHeadersPolicy addHeadersPolicy = new AddHeadersPolicy(headers);
59 |
60 | HttpPipeline httpPipeline = new HttpPipelineBuilder().httpClient(HttpClient.createDefault())
61 | .policies(authPolicy, addHeadersPolicy).build();
62 | // Instantiate a client that will be used to call the service.
63 | HttpLogOptions httpLogOptions = new HttpLogOptions();
64 | httpLogOptions.setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS);
65 |
66 | AnomalyDetectorClient anomalyDetectorClient = new AnomalyDetectorClientBuilder()
67 | .pipeline(httpPipeline)
68 | .endpoint(endpoint)
69 | .httpLogOptions(httpLogOptions)
70 | .buildClient();
71 | return anomalyDetectorClient;
72 | }
73 |
74 | private static UUID getModelId(AnomalyDetectorClient client, ModelInfo request) {
75 | TrainMultivariateModelResponse trainMultivariateModelResponse = client.trainMultivariateModelWithResponse(request, Context.NONE);
76 | String header = trainMultivariateModelResponse.getDeserializedHeaders().getLocation();
77 | String[] substring = header.split("/");
78 | return UUID.fromString(substring[substring.length - 1]);
79 | }
80 |
81 | private static Response<Model> getModelStatus(AnomalyDetectorClient client, UUID model_id) {
82 | Response<Model> response = client.getMultivariateModelWithResponse(model_id, Context.NONE);
83 | System.out.println("training");
84 | return response;
85 | }
86 |
87 | private static UUID getResultId(AnomalyDetectorClient client, UUID modelId, DetectionRequest detectionRequest) {
88 | DetectAnomalyResponse detectAnomalyResponse = client.detectAnomalyWithResponse(modelId, detectionRequest, Context.NONE);
89 | String location = detectAnomalyResponse.getDeserializedHeaders().getLocation();
90 | String[] substring = location.split("/");
91 | return UUID.fromString(substring[substring.length - 1]);
92 | }
93 |
94 | private static DetectionResult getInferenceStatus(AnomalyDetectorClient client, UUID resultId) {
95 | DetectionResult response = client.getDetectionResult(resultId);
96 | return response;
97 | }
98 |
99 | private static void ExportResult(AnomalyDetectorClient client, UUID modelId, String path) throws FileNotFoundException {
100 | StreamResponse response = client.exportModelWithResponse(modelId, Context.NONE);
101 | Flux<ByteBuffer> value = response.getValue();
102 | FileOutputStream bw = new FileOutputStream(path);
103 | value.subscribe(s -> write(bw, s), (e) -> close(bw), () -> close(bw));
104 | }
105 |
106 | private static void GetModelList(AnomalyDetectorClient client, Integer skip, Integer top){
107 | PagedIterable<ModelSnapshot> response = client.listMultivariateModel(skip, top);
108 | Iterator<PagedResponse<ModelSnapshot>> ite = response.iterableByPage().iterator();
109 | int i = 1;
110 | while (ite.hasNext()) {
111 | PagedResponse<ModelSnapshot> items = ite.next();
112 | System.out.println("The result in the page "+i);
113 | i++;
114 | for (ModelSnapshot item : items.getValue()) {
116 | System.out.println("\t"+item.getModelId());
117 | }
118 | break;
119 | }
120 | }
121 |
122 |
123 | public static void main(final String[] args) throws IOException, InterruptedException {
124 | String endpoint = "";
125 | String key = "";
126 | //Get multivariate client
127 | AnomalyDetectorClient client = getClient(endpoint, key);
128 |
129 |
130 | //Start training and get Model ID
131 | Integer window = 28;
132 | AlignMode alignMode = AlignMode.OUTER;
133 | FillNAMethod fillNAMethod = FillNAMethod.LINEAR;
134 | Integer paddingValue = 0;
135 | AlignPolicy alignPolicy = new AlignPolicy()
136 | .setAlignMode(alignMode)
137 | .setFillNAMethod(fillNAMethod)
138 | .setPaddingValue(paddingValue);
139 | String source = "";
140 | OffsetDateTime startTime = OffsetDateTime.of(2021, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
141 | OffsetDateTime endTime = OffsetDateTime.of(2021, 1, 2, 12, 0, 0, 0, ZoneOffset.UTC);
142 | String displayName = "";
143 | ModelInfo request = new ModelInfo()
144 | .setSlidingWindow(window)
145 | .setAlignPolicy(alignPolicy)
146 | .setSource(source)
147 | .setStartTime(startTime)
148 | .setEndTime(endTime)
149 | .setDisplayName(displayName);
150 | UUID modelId = getModelId(client, request);
151 | System.out.println(modelId);
152 |
153 | //Check model status until the model get ready
154 | Response<Model> trainResponse;
155 | while (true) {
156 | trainResponse = getModelStatus(client, modelId);
157 | ModelStatus modelStatus = trainResponse.getValue().getModelInfo().getStatus();
158 | if (modelStatus == ModelStatus.READY || modelStatus == ModelStatus.FAILED) {
159 | break;
160 | }
161 | TimeUnit.SECONDS.sleep(10);
162 | }
163 |
164 | if (trainResponse.getValue().getModelInfo().getStatus() != ModelStatus.READY){
165 | System.out.println("Training failed.");
166 | List<ErrorResponse> errorMessages = trainResponse.getValue().getModelInfo().getErrors();
167 | for (ErrorResponse errorMessage : errorMessages) {
168 | System.out.println("Error code: " + errorMessage.getCode());
169 | System.out.println("Error message: " + errorMessage.getMessage());
170 | }
171 | return;
172 | }
173 |
174 | //Start inference and get the Result ID
175 | DetectionRequest detectionRequest = new DetectionRequest().setSource(source).setStartTime(startTime).setEndTime(endTime);
176 | UUID resultId = getResultId(client, modelId, detectionRequest);
177 |
178 | //Check inference status until the result get ready
179 | DetectionResult detectionResult;
180 | while (true) {
181 | detectionResult = getInferenceStatus(client, resultId);
182 | DetectionStatus detectionStatus = detectionResult.getSummary().getStatus();
183 | if (detectionStatus == DetectionStatus.READY || detectionStatus == DetectionStatus.FAILED) {
184 | break;
185 | }
186 | TimeUnit.SECONDS.sleep(10);
187 | }
188 |
189 | if (detectionResult.getSummary().getStatus() != DetectionStatus.READY){
190 | System.out.println("Inference failed");
191 | List<ErrorResponse> detectErrorMessages = detectionResult.getSummary().getErrors();
192 | for (ErrorResponse errorMessage : detectErrorMessages) {
193 | System.out.println("Error code: " + errorMessage.getCode());
194 | System.out.println("Error message: " + errorMessage.getMessage());
195 | }
196 | return;
197 | }
198 |
199 | //Export result files to local
200 | String path = "";
201 | ExportResult(client, modelId, path);
202 |
203 |
204 | //Delete model
205 | Response<Void> deleteResponse = client.deleteMultivariateModelWithResponse(modelId, Context.NONE);
206 |
207 |
208 | //Get model list
209 | Integer skip = 0;
210 | Integer top = 5;
211 | GetModelList(client, skip, top);
212 | }
213 | }
--------------------------------------------------------------------------------
/samples-multivariate/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_type: sample
3 | languages:
4 | - csharp
5 | - java
6 | - python
7 | - javascript
8 | products:
9 | - azure
10 | - ai-services
11 | - azure-anomaly-detector
12 | description: "This repository contains samples for Anomaly Detector multivariate API. The Anomaly Detector multivariate API enables you to monitor and find abnormalities in your time series data by automatically identifying and applying the correct statistical models, regardless of industry, scenario, or data volume."
13 | ---
14 |
15 | # Anomaly Detector multivariate API Samples
16 |
17 | This repository contains samples for [Anomaly Detector API](https://aka.ms/anomalydetector). The Anomaly Detector multivariate API enables you to monitor and find abnormalities in your time series data by automatically identifying and applying the correct statistical models, regardless of industry, scenario, or data volume. Using your time series data, the API can find anomalies as a batch throughout your data, or determine if your latest data point is an anomaly.
18 |
19 | ## Prerequisites
20 |
21 | You must have an [Anomaly Detector API resource](https://aka.ms/adnew). Before continuing, you will need the API key and the endpoint from your Azure dashboard.
22 | 
23 |
24 | Alternatively, you can create a free 7-day Anomaly Detector trial resource from [here](https://azure.microsoft.com/en-us/try/cognitive-services/my-apis/).
25 |
26 | ## Data requirements
27 |
28 | Example data is provided in this repository, along with example JSON responses from the API. To use the Anomaly Detector API on your time series data, ensure the following:
29 |
30 | * Your data points are separated by the same interval, with no missing points.
31 | * Your data has at least 13 data points if it doesn't have clear periodicity.
32 | * Your data has at least 4 periods if it does have clear periodicity.
33 |
34 | Please read [Best practices for using the Anomaly Detector API](https://aka.ms/adbest) for details. A quick way to sanity-check the interval requirement is sketched at the end of this README.
35 |
36 | ## Sample Code
37 |
38 | | Language | Sample Code |
39 | |:---------|:------------|
40 | | Python | [Sample](./Python) |
41 | | C# | [Sample](./CSharp) |
42 | | Java | [Sample](./Java) |
43 | | JavaScript | [Sample](./JavaScript) |
44 |
45 | ## Example data
46 |
47 | Properly formatted multivariate sample data can be found in this [zip file](./multivariate_sample_data).
48 |
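49 | Since the data requirements above hinge on evenly spaced timestamps with no gaps, it can save a failed training round to sanity-check a series before uploading it. The sketch below is one way to do that with pandas; the file and column names are hypothetical and should be adapted to your data.
50 |
51 | ```python
52 | import pandas as pd
53 |
54 | # Hypothetical file and column names; adjust to your dataset.
55 | df = pd.read_csv("your_timeseries.csv", parse_dates=["timestamp"])
56 |
57 | # Every consecutive gap between timestamps should be identical
58 | # (same interval, no missing points).
59 | gaps = df["timestamp"].sort_values().diff().dropna()
60 | if gaps.nunique() == 1:
61 |     print(f"OK: constant interval of {gaps.iloc[0]}")
62 | else:
63 |     print(f"Irregular intervals found: {gaps.unique()}")
64 | ```
65 |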
--------------------------------------------------------------------------------
/samples-multivariate/Sample_multivaraiate_detect.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | using System;
5 | using System.Collections.Generic;
6 | using System.Drawing.Text;
7 | using System.IO;
8 | using System.Linq;
9 | using System.Linq.Expressions;
10 | using System.Net.NetworkInformation;
11 | using System.Reflection;
12 | using System.Text;
13 | using System.Threading.Tasks;
14 | using Azure.AI.AnomalyDetector.Models;
15 | using Azure.Core.TestFramework;
16 | using Microsoft.Identity.Client;
17 | using NUnit.Framework;
18 |
19 | namespace Azure.AI.AnomalyDetector.Tests.Samples
20 | {
21 | public partial class AnomalyDetectorSamples : SamplesBase<AnomalyDetectorTestEnvironment>
22 | {
23 | [Test]
24 | public async Task MultivariateDetect()
25 | {
26 | //read endpoint and apiKey
27 | string endpoint = TestEnvironment.Endpoint;
28 | string apiKey = TestEnvironment.ApiKey;
29 | string datasource = TestEnvironment.DataSource;
30 | Console.WriteLine(endpoint);
31 | var endpointUri = new Uri(endpoint);
32 | var credential = new AzureKeyCredential(apiKey);
33 |
34 | //create client
35 | AnomalyDetectorClient client = new AnomalyDetectorClient(endpointUri, credential);
36 |
37 | // train
38 | TimeSpan offset = new TimeSpan(0);
39 | DateTimeOffset start_time = new DateTimeOffset(2021, 1, 1, 0, 0, 0, offset);
40 | DateTimeOffset end_time = new DateTimeOffset(2021, 1, 2, 12, 0, 0, offset);
41 | Guid? model_id_raw = null;
42 | try
43 | {
44 | model_id_raw = await trainAsync(client, datasource, start_time, end_time).ConfigureAwait(false);
45 | Console.WriteLine(model_id_raw);
46 | Guid model_id = model_id_raw.GetValueOrDefault();
47 |
48 | // detect
49 | start_time = end_time;
50 | end_time = new DateTimeOffset(2021, 1, 3, 0, 0, 0, offset);
51 | DetectionResult result = await detectAsync(client, datasource, model_id, start_time, end_time).ConfigureAwait(false);
52 | if (result != null)
53 | {
54 | Console.WriteLine(String.Format("Result ID: {0}", result.ResultId));
55 | Console.WriteLine(String.Format("Result summary: {0}", result.Summary));
56 | Console.WriteLine(String.Format("Result length: {0}", result.Results.Count));
57 | }
58 |
59 | // export model
60 | await exportAsync(client, model_id).ConfigureAwait(false);
61 |
62 | // delete
63 | await deleteAsync(client, model_id).ConfigureAwait(false);
64 | }
65 | catch (Exception e)
66 | {
67 | String msg = String.Format("Multivariate error. {0}", e.Message);
68 | if (model_id_raw != null)
69 | {
70 | await deleteAsync(client, model_id_raw.GetValueOrDefault()).ConfigureAwait(false);
71 | }
72 | Console.WriteLine(msg);
73 | throw new Exception(msg);
74 | }
75 | }
76 |
77 | #region Snippet:TrainMultivariateModel
78 | private async Task<Guid?> trainAsync(AnomalyDetectorClient client, string datasource, DateTimeOffset start_time, DateTimeOffset end_time)
79 | {
80 | try
81 | {
82 | Console.WriteLine("Training new model...");
83 |
84 | int model_number = await getModelNumberAsync(client, false).ConfigureAwait(false);
85 | Console.WriteLine(String.Format("{0} available models before training.", model_number));
86 |
87 | ModelInfo data_feed = new ModelInfo(datasource, start_time, end_time);
88 | Response response_header = client.TrainMultivariateModel(data_feed);
89 | response_header.Headers.TryGetValue("Location", out string trained_model_id_path);
90 | Guid trained_model_id = Guid.Parse(trained_model_id_path.Split('/').LastOrDefault());
91 | Console.WriteLine(trained_model_id);
92 |
93 | // Wait until the model is ready. It usually takes several minutes
94 | Response<Model> get_response = await client.GetMultivariateModelAsync(trained_model_id).ConfigureAwait(false);
95 | while (get_response.Value.ModelInfo.Status != ModelStatus.Ready && get_response.Value.ModelInfo.Status != ModelStatus.Failed)
96 | {
97 | System.Threading.Thread.Sleep(10000);
98 | get_response = await client.GetMultivariateModelAsync(trained_model_id).ConfigureAwait(false);
99 | Console.WriteLine(String.Format("model_id: {0}, createdTime: {1}, lastUpdateTime: {2}, status: {3}.", get_response.Value.ModelId, get_response.Value.CreatedTime, get_response.Value.LastUpdatedTime, get_response.Value.ModelInfo.Status));
100 | }
101 |
102 | if (get_response.Value.ModelInfo.Status != ModelStatus.Ready)
103 | {
104 | Console.WriteLine(String.Format("Training failed."));
105 | IReadOnlyList<ErrorResponse> errors = get_response.Value.ModelInfo.Errors;
106 | foreach (ErrorResponse error in errors)
107 | {
108 | Console.WriteLine(String.Format("Error code: {0}.", error.Code));
109 | Console.WriteLine(String.Format("Error message: {0}.", error.Message));
110 | }
111 | throw new Exception("Training failed.");
112 | }
113 |
114 | model_number = await getModelNumberAsync(client).ConfigureAwait(false);
115 | Console.WriteLine(String.Format("{0} available models after training.", model_number));
116 | return trained_model_id;
117 | }
118 | catch (Exception e)
119 | {
120 | Console.WriteLine(String.Format("Train error. {0}", e.Message));
121 | throw new Exception(e.Message);
122 | }
123 | }
124 | #endregion
125 |
126 | #region Snippet:DetectMultivariateAnomaly
127 | private async Task<DetectionResult> detectAsync(AnomalyDetectorClient client, string datasource, Guid model_id, DateTimeOffset start_time, DateTimeOffset end_time)
128 | {
129 | try
130 | {
131 | Console.WriteLine("Start detect...");
132 | Response<Model> get_response = await client.GetMultivariateModelAsync(model_id).ConfigureAwait(false);
133 |
134 | DetectionRequest detectionRequest = new DetectionRequest(datasource, start_time, end_time);
135 | Response result_response = await client.DetectAnomalyAsync(model_id, detectionRequest).ConfigureAwait(false);
136 | var ok = result_response.Headers.TryGetValue("Location", out string result_id_path);
137 | Guid result_id = Guid.Parse(result_id_path.Split('/').LastOrDefault());
138 | // get detection result
139 | Response<DetectionResult> result = await client.GetDetectionResultAsync(result_id).ConfigureAwait(false);
140 | while (result.Value.Summary.Status != DetectionStatus.Ready && result.Value.Summary.Status != DetectionStatus.Failed)
141 | {
142 | System.Threading.Thread.Sleep(2000);
143 | result = await client.GetDetectionResultAsync(result_id).ConfigureAwait(false);
144 | }
145 |
146 | if (result.Value.Summary.Status != DetectionStatus.Ready)
147 | {
148 | Console.WriteLine(String.Format("Inference failed."));
149 | IReadOnlyList<ErrorResponse> errors = result.Value.Summary.Errors;
150 | foreach (ErrorResponse error in errors)
151 | {
152 | Console.WriteLine(String.Format("Error code: {0}.", error.Code));
153 | Console.WriteLine(String.Format("Error message: {0}.", error.Message));
154 | }
155 | return null;
156 | }
157 |
158 | return result.Value;
159 | }
160 | catch (Exception e)
161 | {
162 | Console.WriteLine(String.Format("Detection error. {0}", e.Message));
163 | throw new Exception(e.Message);
164 | }
165 | }
166 | #endregion
167 |
168 | #region Snippet:ExportMultivariateModel
169 | private async Task exportAsync(AnomalyDetectorClient client, Guid model_id, string model_path = "model.zip")
170 | {
171 | try
172 | {
173 | Stream model = await client.ExportModelAsync(model_id).ConfigureAwait(false);
174 | if (model != null)
175 | {
176 | var fileStream = File.Create(model_path);
177 | model.Seek(0, SeekOrigin.Begin);
178 | model.CopyTo(fileStream);
179 | fileStream.Close();
180 | }
181 | }
182 | catch (Exception e)
183 | {
184 | Console.WriteLine(String.Format("Export error. {0}", e.Message));
185 | throw new Exception(e.Message);
186 | }
187 | }
188 | #endregion
189 |
190 | #region Snippet:DeleteMultivariateModel
191 | private async Task deleteAsync(AnomalyDetectorClient client, Guid model_id)
192 | {
193 | await client.DeleteMultivariateModelAsync(model_id).ConfigureAwait(false);
194 | int model_number = await getModelNumberAsync(client).ConfigureAwait(false);
195 | Console.WriteLine(String.Format("{0} available models after deletion.", model_number));
196 | }
197 | private async Task<int> getModelNumberAsync(AnomalyDetectorClient client, bool delete = false)
198 | {
199 | int count = 0;
200 | AsyncPageable<ModelSnapshot> model_list = client.ListMultivariateModelAsync(0, 10000);
201 | await foreach (ModelSnapshot x in model_list)
202 | {
203 | count += 1;
204 | Console.WriteLine(String.Format("model_id: {0}, createdTime: {1}, lastUpdateTime: {2}.", x.ModelId, x.CreatedTime, x.LastUpdatedTime));
205 | if (delete && count < 4)
206 | {
207 | await client.DeleteMultivariateModelAsync(x.ModelId).ConfigureAwait(false);
208 | }
209 | }
210 | return count;
211 | }
212 | #endregion
213 | }
214 | }
--------------------------------------------------------------------------------
/samples-multivariate/sample_multivariate_detect.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 |
4 | """
5 | FILE: sample_multivariate_detect.py
6 |
7 | DESCRIPTION:
8 | This sample demonstrates how to use multivariate dataset to train a model and use the model to detect anomalies.
9 |
10 | Prerequisites:
11 | * The Anomaly Detector client library for Python
12 | * A valid data feed
13 |
14 | USAGE:
15 | python sample_multivariate_detect.py
16 |
17 | Set the environment variables with your own values before running the sample:
18 | 1) ANOMALY_DETECTOR_KEY - your Anomaly Detector API key.
19 | 2) ANOMALY_DETECTOR_ENDPOINT - the endpoint of your Anomaly Detector resource.
20 | """
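# A minimal setup sketch before running (shell syntax assumed; values are placeholders):
#   export ANOMALY_DETECTOR_KEY="<your-key>"
#   export ANOMALY_DETECTOR_ENDPOINT="https://<your-resource>.cognitiveservices.azure.com/"
# For the multivariate API, the data source passed to this sample is typically a blob
# SAS URL pointing at a zip of per-variable CSV files (see sampledata/multivariate).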
21 |
22 | import os
23 | import time
24 | from datetime import datetime, timezone
25 |
26 | from azure.ai.anomalydetector import AnomalyDetectorClient
27 | from azure.ai.anomalydetector.models import DetectionRequest, ModelInfo
28 | from azure.ai.anomalydetector.models import ModelStatus, DetectionStatus
29 | from azure.core.credentials import AzureKeyCredential
30 | from azure.core.exceptions import HttpResponseError
31 |
32 |
33 | class MultivariateSample:
34 |
35 | def __init__(self, subscription_key, anomaly_detector_endpoint, data_source=None):
36 | self.sub_key = subscription_key
37 | self.end_point = anomaly_detector_endpoint
38 |
39 | # Create an Anomaly Detector client
40 |
41 | #
42 | self.ad_client = AnomalyDetectorClient(AzureKeyCredential(self.sub_key), self.end_point)
43 | #
44 |
45 | self.data_source = data_source
46 |
47 | def train(self, start_time, end_time):
48 | # Number of models available now
49 | model_list = list(self.ad_client.list_multivariate_model(skip=0, top=10000))
50 | print("{:d} available models before training.".format(len(model_list)))
51 |
52 | # Use sample data to train the model
53 | print("Training new model...(it may take a few minutes)")
54 | data_feed = ModelInfo(start_time=start_time, end_time=end_time, source=self.data_source)
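# Note: the `cls` callback below is handed (pipeline_response, deserialized body,
# response headers) by azure-core; returning all of them and indexing [-1] keeps
# just the headers, which carry the new model's id in the 'Location' header.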
55 | response_header = \
56 | self.ad_client.train_multivariate_model(data_feed, cls=lambda *args: [args[i] for i in range(len(args))])[-1]
57 | trained_model_id = response_header['Location'].split("/")[-1]
58 |
59 | # Wait until the model is ready. It usually takes several minutes
60 | model_status = None
61 |
62 | while model_status != ModelStatus.READY and model_status != ModelStatus.FAILED:
63 | model_info = self.ad_client.get_multivariate_model(trained_model_id).model_info
64 | model_status = model_info.status
65 | time.sleep(10)
66 |
67 | if model_status == ModelStatus.FAILED:
68 | print("Creating model failed.")
69 | print("Errors:")
70 | if model_info.errors:
71 | for error in model_info.errors:
72 | print("Error code: {}. Message: {}".format(error.code, error.message))
73 | else:
74 | print("None")
75 | return None
76 |
77 | if model_status == ModelStatus.READY:
78 | # Model list after training
79 | new_model_list = list(self.ad_client.list_multivariate_model(skip=0, top=10000))
80 |
81 | print("Done.\n--------------------")
82 | print("{:d} available models after training.".format(len(new_model_list)))
83 |
84 | # Return the latest model id
85 | return trained_model_id
86 |
87 |
88 | def detect(self, model_id, start_time, end_time):
89 | # Detect anomaly in the same data source (but a different interval)
90 | try:
91 | detection_req = DetectionRequest(source=self.data_source, start_time=start_time, end_time=end_time)
92 | response_header = self.ad_client.detect_anomaly(model_id, detection_req,
93 | cls=lambda *args: [args[i] for i in range(len(args))])[-1]
94 | result_id = response_header['Location'].split("/")[-1]
95 |
96 | # Get results (may need a few seconds)
97 | r = self.ad_client.get_detection_result(result_id)
98 | print("Get detection result...(it may take a few seconds)")
99 |
100 | while r.summary.status != DetectionStatus.READY and r.summary.status != DetectionStatus.FAILED:
101 | r = self.ad_client.get_detection_result(result_id)
102 | time.sleep(1)
103 |
104 | if r.summary.status == DetectionStatus.FAILED:
105 | print("Detection failed.")
106 | print("Errors:")
107 | if r.summary.errors:
108 | for error in r.summary.errors:
109 | print("Error code: {}. Message: {}".format(error.code, error.message))
110 | else:
111 | print("None")
112 | return None
113 |
114 | except HttpResponseError as e:
115 | print('Error code: {}'.format(e.error.code), 'Error message: {}'.format(e.error.message))
116 | return None
117 | except Exception as e:
118 | raise e
119 | 
120 | return r
120 |
121 | def export_model(self, model_id, model_path="model.zip"):
122 |
123 | # Export the model
124 | model_stream_generator = self.ad_client.export_model(model_id)
125 | with open(model_path, "wb") as f_obj:
126 | while True:
127 | try:
128 | f_obj.write(next(model_stream_generator))
129 | except StopIteration:
130 | break
131 | except Exception as e:
132 | raise e
133 |
134 | def delete_model(self, model_id):
135 |
136 | # Delete the model
137 | self.ad_client.delete_multivariate_model(model_id)
138 | model_list_after_delete = list(self.ad_client.list_multivariate_model(skip=0, top=10000))
139 | print("{:d} available models after deletion.".format(len(model_list_after_delete)))
140 |
141 |
142 | if __name__ == '__main__':
143 | SUBSCRIPTION_KEY = os.environ["ANOMALY_DETECTOR_KEY"]
144 | ANOMALY_DETECTOR_ENDPOINT = os.environ["ANOMALY_DETECTOR_ENDPOINT"]
145 |
146 | # *****************************
147 | # Use your own data source here
148 | # *****************************
149 | data_source = ""
150 |
151 | # Create a new sample and client
152 | sample = MultivariateSample(SUBSCRIPTION_KEY, ANOMALY_DETECTOR_ENDPOINT, data_source)
153 |
154 | # Train a new model
155 | model_id = sample.train(datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
156 | datetime(2021, 1, 2, 12, 0, 0, tzinfo=timezone.utc))
157 | assert model_id is not None
158 |
159 | # Inference
160 | result = sample.detect(model_id, datetime(2021, 1, 2, 12, 0, 0, tzinfo=timezone.utc),
161 | datetime(2021, 1, 3, 0, 0, 0, tzinfo=timezone.utc))
162 | assert result is not None
163 |
164 | print("Result ID:\t", result.result_id)
165 | print("Result summary:\t", result.summary)
166 | print("Result length:\t", len(result.results))
167 |
168 | # Export model
169 | sample.export_model(model_id, "model.zip")
170 |
171 | # Delete model
172 | sample.delete_model(model_id)
173 |
--------------------------------------------------------------------------------
/samples-multivariate/sample_multivariate_detection.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation.
2 | // Licensed under the MIT License.
3 |
4 | /**
5 | * Demonstrates how to train a model on multivariate data and use this model to detect anomalies.
6 | */
7 |
8 | const { AnomalyDetectorClient } = require("@azure/ai-anomaly-detector");
9 | const { AzureKeyCredential } = require("@azure/core-auth");
10 | const fs = require("fs");
11 |
12 |
13 | // Load the .env file if it exists
14 | const dotenv = require("dotenv");
15 | dotenv.config();
16 |
17 | // You will need to set these environment variables in a .env file or edit the following values
18 | const apiKey = process.env["API_KEY"] || "";
19 | const endpoint = process.env["ENDPOINT"] || "";
20 | const data_source = "";
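// Note: the data source is expected to be a blob SAS URL pointing at a zip of
// per-variable CSV files (see sampledata/multivariate in this repo).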
21 |
22 |
23 |
24 | function sleep (time) {
25 | return new Promise((resolve) => setTimeout(resolve, time));
26 | }
27 |
28 | async function main() {
29 |
30 | // create client
31 | const client = new AnomalyDetectorClient(endpoint, new AzureKeyCredential(apiKey));
32 |
33 | // Already available models
34 | const model_list = await client.listMultivariateModel();
35 | console.log("The latest 5 available models (if any):");
36 | for(var i = 0 ; i < 5 ; i++) {
37 | let model_detail = (await model_list.next());
38 | if (model_detail.done == true) break
39 | console.log(model_detail.value);
40 | }
41 |
42 | // construct model request (notice that the start and end time are local time and may not align with your data source)
43 | const Modelrequest = {
44 | source: data_source,
45 | startTime: new Date(2021,0,1,0,0,0),
46 | endTime: new Date(2021,0,2,12,0,0),
47 | slidingWindow:200
48 | };
49 |
50 | // get train result
51 | console.log("Training a new model...");
52 | const train_response = await client.trainMultivariateModel(Modelrequest);
53 | const model_id = train_response.location.split("/").pop();
54 | console.log("New model ID: " + model_id);
55 |
56 | // get model status
57 | let model_response = await client.getMultivariateModel(model_id);
58 | let model_status = model_response.modelInfo.status;
59 |
60 | while (model_status != 'READY' && model_status != 'FAILED'){
61 | await sleep(10000).then(() => {});
62 | model_response = await client.getMultivariateModel(model_id);
63 | model_status = model_response.modelInfo.status;
64 | }
65 |
66 | if (model_status == 'FAILED') {
67 | console.log("Training failed.\nErrors:");
68 | for (let error of model_response.modelInfo?.errors ?? []) {
69 | console.log("Error code: " + error.code + ". Message: " + error.message);
70 | }
71 | return;
72 | }
73 | console.log("TRAINING FINISHED.");
74 |
75 | // get result
76 | console.log("Start detecting...");
77 | const detect_request = {
78 | source: data_source,
79 | startTime: new Date(2021,0,2,12,0,0),
80 | endTime: new Date(2021,0,3,0,0,0)
81 | };
82 | const result_header = await client.detectAnomaly(model_id, detect_request);
83 | const result_id = result_header.location?.split("/").pop() ?? "";
84 | let result = await client.getDetectionResult(result_id);
85 | let result_status = result.summary.status;
86 |
87 | while (result_status != 'READY' && result_status != 'FAILED'){
88 | await sleep(2000).then(() => {});
89 | result = await client.getDetectionResult(result_id);
90 | result_status = result.summary.status;
91 | }
92 |
93 | if (result_status == 'FAILED') {
94 | console.log("Detection failed.\nErrors:");
95 | for (let error of result.summary.errors ?? []) {
96 | console.log("Error code: " + error.code + ". Message: " + error.message)
97 | }
98 | return;
99 | }
100 | console.log("Result status: " + result_status);
101 | console.log("Result Id: " + result.resultId);
102 |
103 | // export the model
104 | const export_result = await client.exportModel(model_id);
105 | const model_path = "model.zip"
106 | const destination = fs.createWriteStream(model_path);
107 | export_result.readableStreamBody?.pipe(destination);
108 | console.log("New model has been exported to " + model_path + ".");
109 |
110 | // delete model
111 | const delete_result = await client.deleteMultivariateModel(model_id);
112 | if (delete_result._response.status === 204)
113 | console.log("New model has been deleted.");
114 | else
115 | console.log("Failed to delete the new model.");
116 | }
117 |
118 | main().catch((err) => {
119 | console.error("The sample encountered an error:", err);
120 | });
121 |
--------------------------------------------------------------------------------
/samples-univariate/csharp-detect-anomalies.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 | //
4 | using System;
5 | using System.IO;
6 | using System.Net;
7 | using System.Net.Http;
8 | using System.Net.Http.Headers;
9 | using System.Text;
10 | using System.Threading.Tasks;
11 | //
12 |
13 | namespace Console
14 | {
15 | class Program
16 | {
17 | //
18 | //This sample assumes you have created an environment variable for your key and endpoint
19 | static readonly string subscriptionKey = Environment.GetEnvironmentVariable("ANOMALY_DETECTOR_KEY");
20 | static readonly string endpoint = Environment.GetEnvironmentVariable("ANOMALY_DETECTOR_ENDPOINT");
21 |
22 | // Replace the dataPath string with a path to the JSON formatted time series data.
23 | const string dataPath = "[PATH_TO_TIME_SERIES_DATA]";
24 |
25 | const string latestPointDetectionUrl = "/anomalydetector/v1.0/timeseries/last/detect";
26 | const string batchDetectionUrl = "/anomalydetector/v1.0/timeseries/entire/detect";
27 | const string changePointDetectionUrl = "/anomalydetector/v1.0/timeseries/changepoint/detect";
27 | //
28 |
29 | //
30 | static void Main(string[] args)
31 | {
32 | //read in the JSON time series data for the API request
33 | var requestData = File.ReadAllText(dataPath);
34 |
35 | detectAnomaliesBatch(requestData);
36 | detectAnomaliesLatest(requestData);
37 | detectChangePoints(requestData);
38 | System.Console.WriteLine("\nPress any key to exit ");
39 | System.Console.ReadKey();
40 | }
41 | //
42 | //
43 | static void detectAnomaliesBatch(string requestData)
44 | {
45 | System.Console.WriteLine("Detecting anomalies as a batch");
46 |
47 | //construct the request
48 | var result = Request(
49 | endpoint,
50 | batchDetectionUrl,
51 | subscriptionKey,
52 | requestData).Result;
53 |
54 | //deserialize the JSON object, and display it
55 | dynamic jsonObj = Newtonsoft.Json.JsonConvert.DeserializeObject(result);
56 | System.Console.WriteLine(jsonObj);
57 |
58 | if (jsonObj["code"] != null)
59 | {
60 | System.Console.WriteLine($"Detection failed. ErrorCode:{jsonObj["code"]}, ErrorMessage:{jsonObj["message"]}");
61 | }
62 | else
63 | {
64 | //Find and display the positions of anomalies in the data set
65 | bool[] anomalies = jsonObj["isAnomaly"].ToObject();
66 | System.Console.WriteLine("\nAnomalies detected in the following data positions:");
67 | for (var i = 0; i < anomalies.Length; i++)
68 | {
69 | if (anomalies[i])
70 | {
71 | System.Console.Write(i + ", ");
72 | }
73 | }
74 | }
75 | }
76 | //
77 | //
78 | static void detectAnomaliesLatest(string requestData)
79 | {
80 | System.Console.WriteLine("\n\nDetermining if latest data point is an anomaly");
81 | //construct the request
82 | var result = Request(
83 | endpoint,
84 | latestPointDetectionUrl,
85 | subscriptionKey,
86 | requestData).Result;
87 |
88 | //deserialize the JSON object, and display it
89 | dynamic jsonObj = Newtonsoft.Json.JsonConvert.DeserializeObject(result);
90 | System.Console.WriteLine(jsonObj);
91 | }
92 | //
93 |
94 | //
95 | static void detectChangePoints(string requestData)
96 | {
97 | System.Console.WriteLine("\n\nDetecting change points in the series.");
98 | //construct the request
99 | var result = Request(
100 | endpoint,
101 | changePointDetectionUrl,
102 | subscriptionKey,
103 | requestData).Result;
104 |
105 | //deserialize the JSON object, and display it
106 | dynamic jsonObj = Newtonsoft.Json.JsonConvert.DeserializeObject(result);
107 | System.Console.WriteLine(jsonObj);
108 |
109 | if (jsonObj["code"] != null)
110 | {
111 | System.Console.WriteLine($"Detection failed. ErrorCode:{jsonObj["code"]}, ErrorMessage:{jsonObj["message"]}");
112 | }
113 | else
114 | {
115 | //Find and display the positions of anomalies in the data set
116 | bool[] anomalies = jsonObj["isChangePoint"].ToObject();
117 | System.Console.WriteLine("\nChange points detected in the following data positions:");
118 | for (var i = 0; i < anomalies.Length; i++)
119 | {
120 | if (anomalies[i])
121 | {
122 | System.Console.Write(i + ", ");
123 | }
124 | }
125 | }
126 | }
127 | //
128 |
129 | /// <summary>
130 | /// Sends a request to the Anomaly Detection API to detect anomaly points
131 | /// </summary>
132 | /// <param name="apiAddress">Address of the API.</param>
133 | /// <param name="endpoint">The endpoint of the API.</param>
134 | /// <param name="subscriptionKey">The subscription key applied.</param>
135 | /// <param name="requestData">The JSON string for request data points.</param>
136 | /// <returns>The JSON string for anomaly points and expected values.</returns>
137 | //
138 | static async Task<string> Request(string apiAddress, string endpoint, string subscriptionKey, string requestData)
139 | {
140 | using (HttpClient client = new HttpClient { BaseAddress = new Uri(apiAddress) })
141 | {
142 | System.Net.ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12 | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls;
143 | client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
144 | client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", subscriptionKey);
145 |
146 | var content = new StringContent(requestData, Encoding.UTF8, "application/json");
147 | var res = await client.PostAsync(endpoint, content);
148 | return await res.Content.ReadAsStringAsync();
149 | }
150 | }
151 | //
152 | }
153 | }
154 |
--------------------------------------------------------------------------------
/samples-univariate/java-detect-anomalies.java:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 | //
4 | import org.apache.http.HttpEntity;
5 | import org.apache.http.client.methods.CloseableHttpResponse;
6 | import org.apache.http.client.methods.HttpPost;
7 | import org.apache.http.entity.StringEntity;
8 | import org.apache.http.impl.client.CloseableHttpClient;
9 | import org.apache.http.impl.client.HttpClients;
10 | import org.apache.http.util.EntityUtils;
11 | import org.json.JSONArray;
12 | import org.json.JSONObject;
13 |
14 | import java.io.IOException;
15 | import java.nio.file.Files;
16 | import java.nio.file.Paths;
17 | //
18 |
19 | public class JavaDetect {
20 | //
21 | // This sample assumes you have created an environment variable for your key and endpoint
22 | static final String subscriptionKey = System.getenv("ANOMALY_DETECTOR_KEY");
23 | static final String endpoint = System.getenv("ANOMALY_DETECTOR_ENDPOINT");
24 |
25 | // Replace the dataPath string with a path to the JSON formatted time series data.
26 | static final String dataPath = "[PATH_TO_TIME_SERIES_DATA]";
27 |
28 | // Urls for anomaly detection on:
29 | // A batch of data points, or
30 | // The latest data point in the time series
31 | static final String latestPointDetectionUrl = "/anomalydetector/v1.0/timeseries/last/detect";
32 | static final String batchDetectionUrl = "/anomalydetector/v1.0/timeseries/entire/detect";
33 | static final String changePointDetectionUrl = "/anomalydetector/v1.0/timeseries/changepoint/detect";
34 | //
35 | //
36 | public static void main(String[] args) throws Exception {
37 |
38 | String requestData = new String(Files.readAllBytes(Paths.get(dataPath)), "utf-8");
39 |
40 | detectAnomaliesBatch(requestData);
41 | detectAnomaliesLatest(requestData);
42 | detectChangePoints(requestData);
43 | }
44 | //
45 | //
46 | static void detectAnomaliesBatch(String requestData) {
47 | System.out.println("Detecting anomalies as a batch");
48 |
49 | String result = sendRequest(batchDetectionUrl, endpoint, subscriptionKey, requestData);
50 | if (result != null) {
51 | System.out.println(result);
52 |
53 | JSONObject jsonObj = new JSONObject(result);
54 | if (jsonObj.has("code")) {
55 | System.out.println(String.format("Detection failed. ErrorCode:%s, ErrorMessage:%s", jsonObj.getString("code"), jsonObj.getString("message")));
56 | } else {
57 | JSONArray jsonArray = jsonObj.getJSONArray("isAnomaly");
58 | System.out.println("Anomalies found in the following data positions:");
59 | for (int i = 0; i < jsonArray.length(); ++i) {
60 | if (jsonArray.getBoolean(i))
61 | System.out.print(i + ", ");
62 | }
63 | System.out.println();
64 | }
65 | }
66 | }
67 | //
68 | //
69 | static void detectAnomaliesLatest(String requestData) {
70 | System.out.println("Determining if latest data point is an anomaly");
71 | String result = sendRequest(latestPointDetectionUrl, endpoint, subscriptionKey, requestData);
72 | System.out.println(result);
73 | }
74 | //
75 | //
76 | static void detectChangePoints(String requestData) {
77 | System.out.println("Detecting change points");
78 |
79 | String result = sendRequest(changePointDetectionUrl, endpoint, subscriptionKey, requestData);
80 | if (result != null) {
81 | System.out.println(result);
82 |
83 | JSONObject jsonObj = new JSONObject(result);
84 | if (jsonObj.has("code")) {
85 | System.out.println(String.format("Detection failed. ErrorCode:%s, ErrorMessage:%s", jsonObj.getString("code"), jsonObj.getString("message")));
86 | } else {
87 | JSONArray jsonArray = jsonObj.getJSONArray("isChangePoint");
88 | System.out.println("Change points found in the following data positions:");
89 | for (int i = 0; i < jsonArray.length(); ++i) {
90 | if (jsonArray.getBoolean(i))
91 | System.out.print(i + ", ");
92 | }
93 | System.out.println();
94 | }
95 | }
96 | }
97 | //
98 | //
99 | static String sendRequest(String apiAddress, String endpoint, String subscriptionKey, String requestData) {
100 | try (CloseableHttpClient client = HttpClients.createDefault()) {
101 | HttpPost request = new HttpPost(endpoint + apiAddress);
102 | // Request headers.
103 | request.setHeader("Content-Type", "application/json");
104 | request.setHeader("Ocp-Apim-Subscription-Key", subscriptionKey);
105 | request.setEntity(new StringEntity(requestData));
106 | try (CloseableHttpResponse response = client.execute(request)) {
107 | HttpEntity respEntity = response.getEntity();
108 | if (respEntity != null) {
109 | return EntityUtils.toString(respEntity, "utf-8");
110 | }
111 | } catch (Exception respEx) {
112 | respEx.printStackTrace();
113 | }
114 | } catch (IOException ex) {
115 | System.err.println("Exception on Anomaly Detector: " + ex.getMessage());
116 | ex.printStackTrace();
117 | }
118 | return null;
119 | }
120 | //
121 | }
122 |
--------------------------------------------------------------------------------
/samples-univariate/python-detect-anomalies.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | #
4 | import os
5 | import requests
6 | import json
7 | #
8 | #
9 | # URLs for anomaly detection with the Anomaly Detector API
10 | batch_detection_url = "/anomalydetector/v1.0/timeseries/entire/detect"
11 | latest_point_detection_url = "/anomalydetector/v1.0/timeseries/last/detect"
12 | change_point_detection_url = "/anomalydetector/v1.0/timeseries/changepoint/detect"
13 |
14 | # This sample assumes you have created an environment variable for your key and endpoint
15 | endpoint = os.environ["ANOMALY_DETECTOR_ENDPOINT"]
16 | subscription_key = os.environ["ANOMALY_DETECTOR_KEY"]
17 |
18 | # Replace with a path to the JSON formatted time series data.
19 | data_location = "[PATH_TO_TIME_SERIES_DATA]"
20 | #
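# The file at data_location should contain a JSON request body shaped like the
# samples in sampledata/univariate, roughly (values here are placeholders):
#   {"granularity": "daily", "series": [{"timestamp": "2018-03-01T00:00:00Z", "value": 32.5}, ...]}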
21 |
22 | """
23 | Sends an anomaly detection request to the Anomaly Detector API.
24 | If the request is successful, the JSON response is returned.
25 | """
26 | #
27 | def send_request(endpoint, url, subscription_key, request_data):
28 | headers = {'Content-Type': 'application/json', 'Ocp-Apim-Subscription-Key': subscription_key}
29 | response = requests.post(endpoint+url, data=json.dumps(request_data), headers=headers)
30 | return json.loads(response.content.decode("utf-8"))
31 | #
32 | """
33 | Detect anomalies throughout the time series data by submitting it as a batch to the API.
34 | """
35 | #
36 | def detect_batch(request_data):
37 | print("Detecting anomalies as a batch")
38 | # Send the request, and print the JSON result
39 | result = send_request(endpoint, batch_detection_url, subscription_key, request_data)
40 | print(json.dumps(result, indent=4))
41 |
42 | if result.get('code') is not None:
43 | print("Detection failed. ErrorCode:{}, ErrorMessage:{}".format(result['code'], result['message']))
44 | else:
45 | # Find and display the positions of anomalies in the data set
46 | anomalies = result["isAnomaly"]
47 | print("Anomalies detected in the following data positions:")
48 |
49 | for x in range(len(anomalies)):
50 | if anomalies[x]:
51 | print (x, request_data['series'][x]['value'])
52 | #
53 | """
54 | Detect if the latest data point in the time series is an anomaly.
55 | """
56 | #
57 | def detect_latest(request_data):
58 | print("Determining if latest data point is an anomaly")
59 | # send the request, and print the JSON result
60 | result = send_request(endpoint, latest_point_detection_url, subscription_key, request_data)
61 | print(json.dumps(result, indent=4))
62 | #
63 | """
64 | Detect change point.
65 | """
66 | #
67 | def detect_change_point(request_data):
68 | print("Detecting change point")
69 | # send the request, and print the JSON result
70 | result = send_request(endpoint, change_point_detection_url, subscription_key, request_data)
71 | print(json.dumps(result, indent=4))
72 |
73 | if result.get('code') is not None:
74 | print("Detection failed. ErrorCode:{}, ErrorMessage:{}".format(result['code'], result['message']))
75 | else:
76 | # Find and display the positions of changePoint in the data set
77 | change_points = result["isChangePoint"]
78 | print("changePoints detected in the following data positions:")
79 |
80 | for x in range(len(change_points)):
81 | if change_points[x]:
82 | print(x, request_data['series'][x]['value'])
83 | #
84 |
85 | # read json time series data from file
86 | #
87 | file_handler = open(data_location)
88 | json_data = json.load(file_handler)
89 | #
90 | #
91 | detect_batch(json_data)
92 | detect_latest(json_data)
93 | detect_change_point(json_data)
94 | #
95 |
--------------------------------------------------------------------------------
/samples-univariate/sdk/csharp-sdk-sample.cs:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright (c) Microsoft Corporation. All rights reserved.
3 | Licensed under the MIT License.
4 |
5 |
6 | This sample demonstrates the Anomaly Detection service's two detection methods:
7 | * Anomaly detection on an entire time-series dataset.
8 | * Anomaly detection on the latest data point in a dataset.
9 |
10 | * Prerequisites:
11 | * The Anomaly Detector client library for .NET
12 | * A .csv file containing a time-series data set with
13 | UTC-timestamp and numerical values pairings.
14 | Example data is included in this repo.
15 | */
16 |
17 | namespace AnomalyDetectorSample
18 | {
19 | //
20 | using System;
21 | using System.IO;
22 | using System.Text;
23 | using System.Linq;
24 | using System.Collections.Generic;
25 | using System.Threading.Tasks;
26 | using Microsoft.Azure.CognitiveServices.AnomalyDetector;
27 | using Microsoft.Azure.CognitiveServices.AnomalyDetector.Models;
28 | //
29 |
30 | class Program{
31 |
32 | //
33 | static void Main(string[] args){
34 | //This sample assumes you have created an environment variable for your key and endpoint
35 | string endpoint = Environment.GetEnvironmentVariable("ANOMALY_DETECTOR_ENDPOINT");
36 | string key = Environment.GetEnvironmentVariable("ANOMALY_DETECTOR_KEY");
37 | string datapath = "request-data.csv";
38 |
39 | IAnomalyDetectorClient client = createClient(endpoint, key); //Anomaly Detector client
40 |
41 | Request request = GetSeriesFromFile(datapath); // The request payload with points from the data file
42 |
43 | EntireDetectSampleAsync(client, request).Wait(); // Async method for batch anomaly detection
44 | LastDetectSampleAsync(client, request).Wait(); // Async method for analyzing the latest data point in the set
45 | DetectChangePoint(client, request).Wait(); // Async method for change point detection
46 |
47 | Console.WriteLine("\nPress ENTER to exit.");
48 | Console.ReadLine();
49 | }
50 | //
51 |
52 | //
53 | static IAnomalyDetectorClient createClient(string endpoint, string key)
54 | {
55 | IAnomalyDetectorClient client = new AnomalyDetectorClient(new ApiKeyServiceClientCredentials(key))
56 | {
57 | Endpoint = endpoint
58 | };
59 | return client;
60 | }
61 | //
62 |
63 | //
64 | //Run the anomaly detection examples with extra error handling
65 | static void runSamples(IAnomalyDetectorClient client, string dataPath)
66 | {
67 |
68 | try
69 | {
70 | Request request = GetSeriesFromFile(dataPath);
72 |
73 | EntireDetectSampleAsync(client, request).Wait();
74 | LastDetectSampleAsync(client, request).Wait();
75 | }
76 | catch (Exception e)
77 | {
78 | Console.WriteLine(e.Message);
79 | if (e.InnerException != null && e.InnerException is APIErrorException)
80 | {
81 | APIError error = ((APIErrorException)e.InnerException).Body;
82 | Console.WriteLine("Error code: " + error.Code);
83 | Console.WriteLine("Error message: " + error.Message);
84 | }
85 | else if (e.InnerException != null)
86 | {
87 | Console.WriteLine(e.InnerException.Message);
88 | }
89 | }
90 | }
91 | //
92 |
93 | //
94 | static Request GetSeriesFromFile(string path)
95 | {
96 | List<Point> list = File.ReadAllLines(path, Encoding.UTF8)
97 | .Where(e => e.Trim().Length != 0)
98 | .Select(e => e.Split(','))
99 | .Where(e => e.Length == 2)
100 | .Select(e => new Point(DateTime.Parse(e[0]), Double.Parse(e[1]))).ToList();
101 |
102 | return new Request(list, Granularity.Daily);
103 | }
104 | //
105 |
106 | //
107 | static async Task EntireDetectSampleAsync(IAnomalyDetectorClient client, Request request)
108 | {
109 | Console.WriteLine("Detecting anomalies in the entire time series.");
110 |
111 | EntireDetectResponse result = await client.EntireDetectAsync(request).ConfigureAwait(false);
112 |
113 | if (result.IsAnomaly.Contains(true))
114 | {
115 | Console.WriteLine("An anomaly was detected at index:");
116 | for (int i = 0; i < request.Series.Count; ++i)
117 | {
118 | if (result.IsAnomaly[i])
119 | {
120 | Console.Write(i);
121 | Console.Write(" ");
122 | }
123 | }
124 | Console.WriteLine();
125 | }
126 | else
127 | {
128 | Console.WriteLine(" No anomalies detected in the series.");
129 | }
130 | }
131 | //
132 |
133 | //
134 | static async Task LastDetectSampleAsync(IAnomalyDetectorClient client, Request request)
135 | {
136 |
137 | Console.WriteLine("Detecting the anomaly status of the latest point in the series.");
138 | LastDetectResponse result = await client.LastDetectAsync(request).ConfigureAwait(false);
139 |
140 | if (result.IsAnomaly)
141 | {
142 | Console.WriteLine("The latest point was detected as an anomaly.");
143 | }
144 | else
145 | {
146 | Console.WriteLine("The latest point was not detected as an anomaly.");
147 | }
148 | }
149 | //
150 |
151 | //
152 | static async Task DetectChangePoint(IAnomalyDetectorClient client, Request request)
153 | {
154 | Console.WriteLine("Detecting the change points in the series.");
155 |
156 | ChangePointDetectResponse result = await client.DetectChangePointAsync(request).ConfigureAwait(false);
157 |
158 | if (result.IsChangePoint.Contains(true))
159 | {
160 | Console.WriteLine("A change point was detected at index:");
161 | for (int i = 0; i < request.Series.Count; ++i)
162 | {
163 | if (result.IsChangePoint[i])
164 | {
165 | Console.Write(i);
166 | Console.Write(" ");
167 | }
168 | }
169 | Console.WriteLine();
170 | }
171 | else
172 | {
173 | Console.WriteLine("No change point detected in the series.");
174 | }
175 | }
176 | //
177 | }
178 | }
179 |
--------------------------------------------------------------------------------
/samples-univariate/sdk/go-sdk-sample.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "bufio"
5 | "context"
6 | "encoding/csv"
7 | "fmt"
8 | "github.com/Azure/azure-sdk-for-go/services/preview/cognitiveservices/v1.0/anomalydetector"
9 | "github.com/Azure/go-autorest/autorest"
10 | "github.com/Azure/go-autorest/autorest/date"
11 | "io"
12 | "log"
13 | "os"
14 | "strconv"
15 | "time"
16 | )
17 |
18 | func getAnomalyDetectorClient(endpoint string, key string) anomalydetector.BaseClient {
19 | client := anomalydetector.New(endpoint)
20 | client.Authorizer = autorest.NewCognitiveServicesAuthorizer(key)
21 | return client
22 | }
23 |
24 | func getSeriesFromFile(path string) []anomalydetector.Point {
25 | var series []anomalydetector.Point
26 |
27 | csvFile, _ := os.Open(path)
28 | reader := csv.NewReader(bufio.NewReader(csvFile))
29 | for {
30 | line, err := reader.Read()
31 | if err == io.EOF {
32 | break
33 | } else if err != nil {
34 | log.Fatal(err)
35 | }
36 | timestamp, _ := time.Parse(time.RFC3339, line[0])
37 | value, _ := strconv.ParseFloat(line[1], 64)
38 |
39 | series = append(series, anomalydetector.Point{Timestamp: &date.Time{timestamp}, Value: &value})
40 | }
41 | return series
42 | }
43 |
44 | func entireDetectSample(endpoint string, key string, request anomalydetector.Request) {
45 | fmt.Println("Sample of detecting anomalies in the entire series.")
46 | client := getAnomalyDetectorClient(endpoint, key)
47 | response, err := client.EntireDetect(context.Background(), request)
48 | if err != nil {
49 | log.Fatal("ERROR:", err)
50 | }
51 |
52 | var anomalies []int
53 | for idx, isAnomaly := range *response.IsAnomaly {
54 | if isAnomaly {
55 | anomalies = append(anomalies, idx)
56 | }
57 | }
58 | if len(anomalies) > 0 {
59 | fmt.Println("Anomaly was detected from the series at index:")
60 | for _, idx := range anomalies {
61 | fmt.Println(idx)
62 | }
63 | } else {
64 | fmt.Println("There is no anomaly detected from the series.")
65 | }
66 | }
67 |
68 | func lastDetectSample(endpoint string, key string, request anomalydetector.Request) {
69 | fmt.Println("Sample of detecting whether the latest point in series is anomaly.")
70 | client := getAnomalyDetectorClient(endpoint, key)
71 | response, err := client.LastDetect(context.Background(), request)
72 | if err != nil {
73 | log.Fatal("ERROR:", err)
74 | }
75 |
76 | if *response.IsAnomaly {
77 | fmt.Println("The latest point is detected as anomaly.")
78 | } else {
79 | fmt.Println("The latest point is not detected as anomaly.")
80 | }
81 | }
82 |
83 |
84 | func main() {
85 | var endpoint = "[YOUR_ENDPOINT_URL]"
86 | var key = "[YOUR_SUBSCRIPTION_KEY]"
87 | var path = "[PATH_TO_TIME_SERIES_DATA]"
88 |
89 | var series = getSeriesFromFile(path)
90 | var request = anomalydetector.Request{Series: &series, Granularity: anomalydetector.Daily}
91 |
92 | entireDetectSample(endpoint, key, request)
93 | lastDetectSample(endpoint, key, request)
94 | }
95 |
--------------------------------------------------------------------------------
/samples-univariate/sdk/node/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | node_modules/
3 | package-lock.json
--------------------------------------------------------------------------------
/samples-univariate/sdk/node/README.md:
--------------------------------------------------------------------------------
1 | # Steps to run this script
2 |
3 | * Go to parent folder of `package.json`
4 |
5 | * Execute `npm install`
6 |
7 | * Replace `"[YOUR_ENDPOINT_URL]"` with your own endpoint and `"[YOUR_SUBSCRIPTION_KEY]"` with your own subscription key in `src/index.ts`.
8 |
9 | * Execute `npm run index`
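
Assuming the placeholders in `src/index.ts` have been replaced, the full sequence is:

```bash
npm install
npm run index
```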
--------------------------------------------------------------------------------
/samples-univariate/sdk/node/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "testadsdk",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1",
8 | "tsc": "tsc",
9 | "index": "tsc && node ./build/index.js"
10 | },
11 | "author": "zhuxia@microsoft.com",
12 | "license": "ISC",
13 | "dependencies": {
14 | "@azure/cognitiveservices-anomalydetector": "^1.0.0",
15 | "csv-parse": "^4.4.0",
16 | "tsc": "^1.20150623.0",
17 | "typescript": "^3.4.3"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/samples-univariate/sdk/node/src/index.ts:
--------------------------------------------------------------------------------
1 | import * as msRest from "@azure/ms-rest-js";
2 | import { AnomalyDetectorClient, AnomalyDetectorModels, AnomalyDetectorMappers } from "@azure/cognitiveservices-anomalydetector";
3 | import * as fs from "fs";
4 | import parse from "csv-parse/lib/sync";
5 |
6 | function entire_detect_sample(endpoint: string, key: string, request: AnomalyDetectorModels.Request){
7 | console.log("Sample of detecting anomalies in the entire series.");
8 | const options: msRest.ApiKeyCredentialOptions = {
9 | inHeader: {
10 | "Ocp-Apim-Subscription-Key": key
11 | }
12 | };
13 |
14 | const client = new AnomalyDetectorClient(new msRest.ApiKeyCredentials(options), endpoint);
15 | client.entireDetect(request).then((result) => {
16 | if(result.isAnomaly.some(function(e){return e === true;})){
17 | console.log("Anomaly was detected from the series at index:");
18 | result.isAnomaly.forEach(function(e, i){
19 | if(e === true) console.log(i);
20 | });
21 | }else{
22 | console.log("There is no anomaly detected from the series.");
23 | }
24 | }).catch((err) => {
25 | if(err.body !== undefined){
26 | console.error("Error code: " + err.body.code);
27 | console.error("Error message: " + err.body.message);
28 | }else{
29 | console.error(err);
30 | }
31 | });
32 | }
33 |
34 | function last_detect_sample(endpoint: string, key: string, request: AnomalyDetectorModels.Request){
35 | console.log("Sample of detecting whether the latest point in series is anomaly.");
36 | const options: msRest.ApiKeyCredentialOptions = {
37 | inHeader: {
38 | "Ocp-Apim-Subscription-Key": key
39 | }
40 | };
41 |
42 | const client = new AnomalyDetectorClient(new msRest.ApiKeyCredentials(options), endpoint);
43 | client.lastDetect(request).then((result) => {
44 | if(result.isAnomaly){
45 | console.log("The latest point is detected as anomaly.");
46 | }else{
47 | console.log("The latest point is not detected as anomaly.");
48 | }
49 | }).catch((err) => {
50 | if(err.body !== undefined){
51 | console.error("Error code: " + err.body.code);
52 | console.error("Error message: " + err.body.message);
53 | }else{
54 | console.error(err);
55 | }
56 | });
57 | }
58 |
59 | function read_series_from_file(path: string): Array<AnomalyDetectorModels.Point>{
60 | let result = Array<AnomalyDetectorModels.Point>();
61 | let input = fs.readFileSync(path).toString();
62 | let parsed = parse(input, {skip_empty_lines:true});
63 | parsed.forEach(function(e: Array<string>){
64 | result.push({timestamp:new Date(e[0]), value:Number(e[1])});
65 | });
66 | return result;
67 | }
68 |
69 | const endpoint = "[YOUR_ENDPOINT_URL]";
70 | const key = "[YOUR_SUBSCRIPTION_KEY]";
71 | const path = "[PATH_TO_TIME_SERIES_DATA]";
72 |
73 | const request: AnomalyDetectorModels.Request = {
74 | series: read_series_from_file(path),
75 | granularity: "daily",
76 | };
77 | entire_detect_sample(endpoint, key, request);
78 | last_detect_sample(endpoint, key, request);
79 |
80 |
--------------------------------------------------------------------------------
/samples-univariate/sdk/node/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | /* Basic Options */
4 | "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
5 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
6 | // "lib": [], /* Specify library files to be included in the compilation. */
7 | // "allowJs": true, /* Allow javascript files to be compiled. */
8 | // "checkJs": true, /* Report errors in .js files. */
9 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
10 | // "declaration": true, /* Generates corresponding '.d.ts' file. */
11 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
12 | // "sourceMap": true, /* Generates corresponding '.map' file. */
13 | // "outFile": "./", /* Concatenate and emit output to single file. */
14 | "outDir": "./build", /* Redirect output structure to the directory. */
15 | // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
16 | // "composite": true, /* Enable project compilation */
17 | // "incremental": true, /* Enable incremental compilation */
18 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
19 | // "removeComments": true, /* Do not emit comments to output. */
20 | // "noEmit": true, /* Do not emit outputs. */
21 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */
22 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
23 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
24 |
25 | /* Strict Type-Checking Options */
26 | "strict": true, /* Enable all strict type-checking options. */
27 | // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
28 | // "strictNullChecks": true, /* Enable strict null checks. */
29 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */
30 | // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
31 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
32 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
33 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
34 |
35 | /* Additional Checks */
36 | // "noUnusedLocals": true, /* Report errors on unused locals. */
37 | // "noUnusedParameters": true, /* Report errors on unused parameters. */
38 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
39 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
40 |
41 | /* Module Resolution Options */
42 | // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
43 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
44 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
45 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
46 | // "typeRoots": [], /* List of folders to include type definitions from. */
47 | // "types": [], /* Type declaration files to be included in compilation. */
48 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
49 | "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
50 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
51 |
52 | /* Source Map Options */
53 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
54 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
55 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
56 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
57 |
58 | /* Experimental Options */
59 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
60 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/samples-univariate/sdk/python-sdk-sample.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 |
4 |
5 | # This sample demonstrates the Anomaly Detection service's two detection methods:
6 | # * Anomaly detection on an entire time-series dataset.
7 | # * Anomaly detection on the latest data point in a dataset.
8 |
9 | # * Prerequisites:
10 | # * The Anomaly Detector client library for Python
11 | # * A .csv file containing a time-series data set with
12 | # UTC-timestamp and numerical values pairings.
13 | # Example data is included in this repo.
14 |
15 | #
16 | from azure.cognitiveservices.anomalydetector import AnomalyDetectorClient
17 | from azure.cognitiveservices.anomalydetector.models import Request, Point, Granularity, \
18 | APIErrorException
19 | from msrest.authentication import CognitiveServicesCredentials
20 | import pandas as pd
21 | import os
22 | #
23 |
24 | #
25 | # This sample assumes you have created an environment variable for your key and endpoint
26 | SUBSCRIPTION_KEY = os.environ["ANOMALY_DETECTOR_KEY"]
27 | ANOMALY_DETECTOR_ENDPOINT = os.environ["ANOMALY_DETECTOR_ENDPOINT"]
28 |
29 | TIME_SERIES_DATA_PATH = "request-data.csv"
30 | #
31 |
32 | # Create an Anomaly Detector client and add the
33 |
34 | #
35 | client = AnomalyDetectorClient(ANOMALY_DETECTOR_ENDPOINT, CognitiveServicesCredentials(SUBSCRIPTION_KEY))
36 | #
37 |
38 | # Load in the time series data file
39 |
40 | #
41 | series = []
42 | data_file = pd.read_csv(TIME_SERIES_DATA_PATH, header=None, encoding='utf-8', parse_dates=[0])
43 | for index, row in data_file.iterrows():
44 | series.append(Point(timestamp=row[0], value=row[1]))
45 | #
46 |
47 | # Create a request from the data file
48 |
49 | #
50 | request = Request(series=series, granularity=Granularity.daily)
51 | #
52 |
53 |
54 | # detect anomalies throughout the entire time series, as a batch
55 |
56 | #
57 | print('Detecting anomalies in the entire time series.')
58 |
59 | try:
60 | response = client.entire_detect(request)
61 | except Exception as e:
62 | if isinstance(e, APIErrorException):
63 | print('Error code: {}'.format(e.error.code),
64 | 'Error message: {}'.format(e.error.message))
65 | else:
66 | print(e)
67 |
68 | if True in response.is_anomaly:
69 | print('An anomaly was detected at index:')
70 | for i in range(len(response.is_anomaly)):
71 | if response.is_anomaly[i]:
72 | print(i)
73 | else:
74 | print('No anomalies were detected in the time series.')
75 | #
76 |
77 | # Detect the anomaly status of the latest data point
78 |
79 | #
80 | print('Detecting the anomaly status of the latest data point.')
81 |
82 | try:
83 | response = client.last_detect(request)
84 | except Exception as e:
85 | if isinstance(e, APIErrorException):
86 | print('Error code: {}'.format(e.error.code),
87 | 'Error message: {}'.format(e.error.message))
88 | else:
89 | print(e)
90 |
91 | if response.is_anomaly:
92 | print('The latest point is detected as anomaly.')
93 | else:
94 | print('The latest point is not detected as anomaly.')
95 | #
96 |
97 | # Detect change points
98 |
99 | #
100 |
101 | print('Detecting change points in the entire time series.')
102 |
103 | try:
104 | response = client.detect_change_point(request)
105 | except APIErrorException as e:
106 | print('Error code: {}'.format(e.error.code), 'Error message: {}'.format(e.error.message))
107 | except Exception as e:
108 | print(e)
109 |
110 | if any(response.is_change_point):
111 | print('A change point was detected at index:')
112 | for i, value in enumerate(response.is_change_point):
113 | if value:
114 | print(i)
115 | else:
116 | print('No change points were detected in the time series.')
117 | #
--------------------------------------------------------------------------------
/samples-univariate/sdk/ruby-sdk-sample.rb:
--------------------------------------------------------------------------------
1 | require "azure_cognitiveservices_anomalydetector"
2 | require "date"
3 | require "csv"
4 |
5 | CognitiveServicesCredentials = MsRestAzure::CognitiveServicesCredentials
6 | AnomalyDetectorClient = Azure::CognitiveServices::AnomalyDetector::V1_0::AnomalyDetectorClient
7 | Request = Azure::CognitiveServices::AnomalyDetector::V1_0::Models::Request
8 | Point = Azure::CognitiveServices::AnomalyDetector::V1_0::Models::Point
9 | Granularity = Azure::CognitiveServices::AnomalyDetector::V1_0::Models::Granularity
10 |
11 | def entire_detect_sample(endpoint, key, request)
12 | puts "Sample of detecting anomalies in the entire series."
13 |
14 | client = AnomalyDetectorClient.new(CognitiveServicesCredentials.new(key))
15 | client.endpoint = endpoint
16 |
17 | result = client.entire_detect(request)
18 |
19 | if result.is_anomaly.include?(true)
20 | puts "Anomaly was detected from the series at index:"
21 | for i in 0 .. request.series.length-1
22 | if result.is_anomaly[i]
23 | print i
24 | print " "
25 | end
26 | end
27 | puts
28 | else
29 | puts "There is no anomaly detected from the series."
30 | end
31 | end
32 |
33 | def last_detect_sample(endpoint, key, request)
34 | puts "Sample of detecting whether the latest point in series is anomaly."
35 |
36 | client = AnomalyDetectorClient.new(CognitiveServicesCredentials.new(key))
37 | client.endpoint = endpoint
38 |
39 | result = client.last_detect(request)
40 |
41 | if result.is_anomaly
42 | puts "The latest point is detected as anomaly."
43 | else
44 | puts "The latest point is not detected as anomaly."
45 | end
46 | end
47 |
48 | def get_series_from_file(path)
49 | result = []
50 | csv = CSV.read(path, headers: false)
51 | csv.each do |item|
52 | p = Point.new()
53 | p.timestamp = DateTime.parse(item[0])
54 | p.value = item[1].to_f
55 | result.append p
56 | end
57 | return result
58 | end
59 |
60 |
61 | endpoint = "[YOUR_ENDPOINT_URL]"
62 | key = "[YOUR_SUBSCRIPTION_KEY]"
63 | path = "[PATH_TO_TIME_SERIES_DATA]"
64 |
65 | # Anomaly detection samples.
66 | begin
67 | series = get_series_from_file(path)
68 | request = Request.new()
69 | request.series = series
70 | request.granularity = Granularity::Daily
71 |
72 | entire_detect_sample(endpoint, key, request)
73 | last_detect_sample(endpoint, key, request)
74 | rescue Exception => e
75 | if e.kind_of?(MsRest::HttpOperationError)
76 | puts "Error code: #{e.body["code"]}"
77 | puts "Error message: #{e.body["message"]}"
78 | else
79 | puts e.message
80 | end
81 | end
82 |
--------------------------------------------------------------------------------
/univariate-live-demo/README.md:
--------------------------------------------------------------------------------
1 | # Anomaly Detection Live Demo Instructions
2 |
3 | 
4 |
5 | Please note: this live demo is only intended to demonstrate the Anomaly Detector API on any CSV file that follows a simple schema. This demo loops over the provided data (with fake timestamps) to demonstrate the anomaly detection API. It does not use the real timestamps provided in the raw data, and should not be used in any production scenario. Once the demo is running, you will be able to see the raw data and the results from the anomaly detection API in the browser. Alternatively, you can also use the online Anomaly Detector [demo page](https://algoevaluation.azurewebsites.net/#/) to evaluate the API on your own data.
6 |
7 | Your CSV file needs at least two columns: one for the timestamps and one for the values you want to perform anomaly detection on. In addition, you can include an *optional* dimension column that lets you run the anomaly detection API across different categories (e.g. different sensors, different regions, or even different variables) through a drop-down selection menu in the demo. The timestamp column should be in the ISO 8601 format `YYYY-MM-DDTHH:MM:SSZ`. If you intend to use your own CSV, see the `sensor_data.csv` file for how to structure it; a minimal example follows.
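
For reference, a minimal CSV in the expected shape might look like this (the column names here match the defaults in `demo.py`; yours may differ):

```csv
timestamp,sensor_readings,sensor_name
2021-01-01T00:00:00Z,22.5,sensor_a
2021-01-01T00:05:00Z,22.7,sensor_a
2021-01-01T00:00:00Z,105.2,sensor_b
```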
8 |
9 | ## Step 1 - Setup
10 |
11 | The demo should work in any Linux-based environment using any integrated development environment (IDE). If you choose to use Azure Machine Learning (AML) compute, you should install [VS Code](https://code.visualstudio.com/) locally to be able to run this demo. **If you are not using AML, just skip ahead to Step 2**.
12 |
13 | With VS Code installed, navigate to your AML resource in the Azure portal, and click on `Launch Studio`. On the left, click on the `Compute` tab. Finally, under the `Applications` column, click the `VS Code` link corresponding to the compute instance that you would like to use. When asked by VS Code to `Allow an extension to open this URI?` click `Open`.
14 |
15 | ## Step 2 - Creating the virtual environment and installing the dependencies
16 |
17 | Open a terminal in VS Code, and run the following command to create and activate a new conda virtual environment and install the dependencies:
18 |
19 | ```bash
20 | conda create --name anomaly-detection-demo python=3.7
21 | source activate anomaly-detection-demo
22 | pip install -r requirements.txt
23 | ```
24 |
25 | Next, enter the following two lines in your terminal, replacing `<your-api-key>` and `<your-endpoint>` with the values from your Anomaly Detector resource. You can find them by navigating to your Anomaly Detector resource in the Azure portal and copying the key and endpoint from the "Keys and Endpoint" section of the resource details.
26 |
27 | ```bash
28 | export ANOMALY_DETECTOR_API_KEY=<your-api-key>
29 | export ANOMALY_DETECTOR_ENDPOINT=<your-endpoint>
30 | ```
31 |
32 | ## Step 3 - Adjusting the configuration (Optional)
33 |
34 | If you plan to use your own CSV to run this demo, take some time to review and adjust the default configuration in the `demo.py` file. For the purposes of this demo, the default configuration is stored within `demo.py` as a Python data class with default values. Feel free to modify the values to match your own needs.
35 |
36 | ```python
37 | csv_name: str = "sensor_data.csv" # Name of the csv file containing the data
38 | value_column: str = "sensor_readings" # Name of the column containing the values
39 | timestamp_column: str = "timestamp" # Name of the column containing the timestamps
40 | dimension_column: str = "sensor_name" # (Optional) Name of the column containing a dimension (e.g. sensor name, or location, etc). If your data does not have this column, set it to None.
41 | window_size: int = 50 # Size of the window used to compute the anomaly score
42 | minute_resample: int = 5 # Resample the data to this minute resolution
43 | ad_mode: str = "entire" # Anomaly detection mode to use. Can be "entire" for batch mode or "last" for last point mode.
44 | ```
45 | If your data does not have the optional dimension column, set `dimension_column` in the `demo.py` script to `None`, as in the sketch below.
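
As a sketch, if your CSV contained only a timestamp column and a single value column (the file and column names below are hypothetical), the relevant `Config` fields would be:

```python
csv_name: str = "my_data.csv"         # hypothetical file name
value_column: str = "temperature"     # hypothetical value column
timestamp_column: str = "timestamp"
dimension_column: str = None          # this CSV has no dimension column
```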
46 |
47 | ## Step 4 - Running the demo
48 |
49 | Finally, navigate to this directory in your terminal. Make sure you have the `anomaly-detection-demo` virtual environment activated, then run the following command:
50 |
51 | ```bash
52 | bokeh serve --port 5599 --show demo.py
53 | ```
54 |
55 | A browser tab should open, and the demo should start running. Any detected anomaly will show in red. If the browser tab doesn't open, try to open it manually by navigating to `http://localhost:5599/demo` in the browser. If the demo does not start running, please double-check that you followed the steps above correctly.
56 |
57 | The demo will continue to run until you stop it. To stop it, close the browser tab and press `Ctrl+C` in the terminal to end the process.
--------------------------------------------------------------------------------
/univariate-live-demo/demo.py:
--------------------------------------------------------------------------------
1 | # -------------------------------------------------------------
2 | #
3 | # Copyright (c) Microsoft Corporation. All rights reserved.
4 | #
5 | # -------------------------------------------------------------
6 |
7 | import datetime
8 | import os
9 | from dataclasses import dataclass
10 | from datetime import timedelta, timezone
11 |
12 | import pandas as pd
13 | from bokeh.driving import count
14 | from bokeh.layouts import column, row
15 | from bokeh.models import ColumnDataSource, Select, Slider
16 | from bokeh.plotting import curdoc, figure
17 | from dateutil.parser import parse
18 |
19 | from utils import ADTimeSeries, UnivariateAnomalyDetector
20 |
21 |
22 | @dataclass
23 | class Config:
24 | """
25 | Dataclass to store the default configuration for the demo. Please change the values if you
26 | want to use your own data.
27 | """
28 |
29 | csv_name: str = "sensor_data.csv" # Name of the csv file containing the data
30 | value_column: str = "sensor_readings" # Name of the column containing the values
31 | timestamp_column: str = "timestamp" # Name of the column containing the timestamps
32 | dimension_column: str = "sensor_name" # (Optional) Name of the column containing a dimension (e.g. sensor name or location). Set to None if your data has no such column.
33 | window_size: int = 50 # Size of the window used to compute the anomaly score
34 | minute_resample: int = 5 # Resample the data to this minute resolution
35 | ad_mode: str = "entire" # Anomaly detection mode to use. Can be "entire" for batch mode or "last" for last point mode.
36 |
37 |
38 | class MissingEnvironmentVariable(Exception):
39 | """
40 | Exception to be thrown when a required environment variable is not set.
41 | """
42 |
43 | pass
44 |
45 |
46 | # Read environment variables:
47 | apikey = os.getenv("ANOMALY_DETECTOR_API_KEY")
48 | endpoint = os.getenv("ANOMALY_DETECTOR_ENDPOINT")
49 |
50 | if apikey is None or endpoint is None:
51 | raise MissingEnvironmentVariable(
52 | "Please ensure ANOMALY_DETECTOR_API_KEY and ANOMALY_DETECTOR_ENDPOINT environment variables are set!"
53 | )
54 |
55 | # Read CSV:
56 | try:
57 | df = pd.read_csv(Config.csv_name)
58 | except FileNotFoundError:
59 | raise FileNotFoundError(
60 | f"Please ensure the file {Config.csv_name} exists in the current directory!"
61 | )
62 |
63 | # Validate the configuration:
64 | if Config.timestamp_column not in df.columns:
65 | raise ValueError("Please ensure the timestamp column is present in the CSV!")
66 | elif Config.value_column not in df.columns:
67 | raise ValueError("Please ensure the value column is present in the CSV!")
68 |
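# If no dimension column is configured, create a synthetic one so the pivot below handles both cases uniformly.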
69 | if Config.dimension_column is None:
70 | Config.dimension_column = "dimension"
71 | df[Config.dimension_column] = "main_dimension"
72 | else:
73 | if Config.dimension_column not in df.columns:
74 | raise ValueError(
75 | f"Please ensure the dimension column is present in the CSV! ({Config.dimension_column})"
76 | )
77 |
78 | # Extract the relevant columns:
79 | df = df[[Config.timestamp_column, Config.dimension_column, Config.value_column]]
80 |
81 | # Drop rows with NaNs:
82 | df.dropna(inplace=True)
83 |
84 | # Pivot dataframe to show time vs. sensor data
85 | df = df.pivot_table(
86 | index=Config.timestamp_column,
87 | columns=Config.dimension_column,
88 | values=Config.value_column,
89 | aggfunc="mean",
90 | )
91 |
92 | # Parse timestamp column
93 | df.index = df.index.map(lambda x: parse(str(x).replace("@", "")))
94 |
95 | # Convert index to datetime:
96 | df.index = pd.to_datetime(df.index)
97 |
98 | source = ColumnDataSource(
99 | dict(
100 | time=[],
101 | timestamp=[],
102 | timestamp_str=[],
103 | values=[],
104 | expectedValues=[],
105 | upperband=[],
106 | lowerband=[],
107 | isAnomaly=[],
108 | color=[],
109 | )
110 | )
111 |
112 | p = figure(
113 | height=500,
114 | width=1200,
115 | tools="xpan,xwheel_zoom,xbox_zoom,reset",
116 | x_axis_type="datetime",
117 | y_axis_location="right",
118 | )
119 |
120 | p.x_range.follow = "end"
121 | p.y_range.start = 0
122 | p.xaxis.axis_label = "Time"
123 |
124 | p.line(
125 | x="timestamp",
126 | y="values",
127 | alpha=0.8,
128 | line_width=2,
129 | color="navy",
130 | source=source,
131 | legend_label="Measured Value",
132 | )
133 |
134 | p.line(
135 | x="timestamp",
136 | y="expectedValues",
137 | alpha=0.8,
138 | line_width=2,
139 | color="orange",
140 | source=source,
141 | legend_label="Expected Value",
142 | )
143 |
144 | p.circle(
145 | "timestamp",
146 | "values",
147 | size=5,
148 | color="color",
149 | alpha=1,
150 | source=source,
151 | legend_label="Data points",
152 | )
153 |
154 | p.segment(
155 | x0="timestamp",
156 | y0="lowerband",
157 | x1="timestamp",
158 | y1="upperband",
159 | line_width=10,
160 | alpha=0.4,
161 | color="orange",
162 | source=source,
163 | legend_label="Expected Range",
164 | )
165 |
166 | p.line(
167 | x="timestamp",
168 | y="upperband",
169 | alpha=0.4,
170 | line_width=1,
171 | color="orange",
172 | source=source,
173 | )
174 |
175 | p.line(
176 | x="timestamp",
177 | y="lowerband",
178 | alpha=0.4,
179 | line_width=1,
180 | color="orange",
181 | source=source,
182 | )
183 |
184 | p.legend.location = "top_left"
185 |
186 | sensitivity = Slider(title="sensitivity", value=95, start=0, end=99, step=1)
187 | max_anomaly_ratio = Slider(
188 | title="max_anomaly_ratio", value=0.20, start=0, end=1, step=0.05
189 | )
190 |
191 | sensor_names = list(df.columns)
192 | scenario = Select(value=sensor_names[-1], options=sensor_names)
193 | adclient = UnivariateAnomalyDetector(key=apikey, endpoint=endpoint)
194 |
195 |
196 | def _get_value(t):
197 | """
198 | Loops over the series continuously, based on the scenario selected by the user
199 | """
200 | return df[scenario.value][t % len(df)]
201 |
202 |
203 | def _get_timestamp(t):
204 | """
205 | Generates a fake timestamp
206 | """
207 | timestamp = datetime.datetime(2015, 1, 1, tzinfo=timezone.utc) + timedelta(
208 | minutes=Config.minute_resample * t
209 | )
210 | return timestamp, timestamp.isoformat().split("+")[0] + "Z"
211 |
212 |
213 | def _call_ad_api(t):
214 | """
215 | Creates a request and calls the anomaly detector API, then processes and returns the response
216 | """
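# Build the request payload from the most recent window_size points in the data source.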
217 | values = source.data["values"][-Config.window_size :]
218 | timestamps = source.data["timestamp_str"][-Config.window_size :]
219 | request = {}
220 | request["series"] = []
221 | for i in range(Config.window_size):
222 | request["series"].append({"value": values[i], "timestamp": timestamps[i]})
223 |
224 | request["granularity"] = "minutely"
225 | request["maxAnomalyRatio"] = max_anomaly_ratio.value
226 | request["sensitivity"] = sensitivity.value
227 | request["customInterval"] = Config.minute_resample
228 |
229 | # validate that the request is valid:
230 | request = ADTimeSeries(request)
231 | request.validate()
232 |
233 | response = adclient.detect_anomaly(mode=Config.ad_mode, data_dict=request)
234 |
235 | if Config.ad_mode == "entire":
236 | response["expected_value"] = response["expected_values"][-1]
237 | response["upper_margin"] = response["upper_margins"][-1]
238 | response["lower_margin"] = response["lower_margins"][-1]
239 | response["is_anomaly"] = response["is_anomaly"][-1]
240 |
241 | print(
242 | f"Point: {str(t)}: Is anomaly? {response['is_anomaly']} -- Expected value: {response['expected_value']}"
243 | )
244 |
245 | upperband = response["expected_value"] + response["upper_margin"]
246 | lowerband = response["expected_value"] - response["lower_margin"]
247 |
248 | if response["is_anomaly"]:
249 | color = "red"
250 | else:
251 | color = "navy"
252 |
253 | return (
254 | response["expected_value"],
255 | upperband,
256 | lowerband,
257 | response["is_anomaly"],
258 | color,
259 | )
260 |
261 |
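# Bokeh's count() driver passes an ever-incrementing integer t to update() on each periodic callback.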
262 | @count()
263 | def update(t):
264 | value = _get_value(t)
265 | ts, ts_str = _get_timestamp(t)
266 |
267 | if t > Config.window_size:
268 | # we have enough data to send an API request
269 | expectedValue, upperband, lowerband, isAnomaly, color = _call_ad_api(t)
270 | else:
271 | # Use default values for the first few points
272 | expectedValue, upperband, lowerband, isAnomaly, color = 0, 0, 0, False, None
273 |
274 | new_data = dict(
275 | time=[t],
276 | timestamp=[ts],
277 | timestamp_str=[ts_str],
278 | expectedValues=[expectedValue],
279 | values=[value],
280 | upperband=[upperband],
281 | lowerband=[lowerband],
282 | isAnomaly=[isAnomaly],
283 | color=[color],
284 | )
285 |
286 | p.title.text = f"Live anomaly detection results | Sensitivity: {sensitivity.value} | Maximum Anomaly Ratio: {max_anomaly_ratio.value}"
287 | source.stream(
288 | new_data, rollover=100
289 | ) # the rollover number must be > window size. It governs the amount of data visible in the window.
290 |
291 |
292 | curdoc().add_root(column(row(max_anomaly_ratio, sensitivity, scenario), p))
293 | curdoc().add_periodic_callback(update, 100)
294 | curdoc().title = "Anomaly Detector API Demo"
295 |
--------------------------------------------------------------------------------
/univariate-live-demo/image.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/AnomalyDetector/90fdcabfd1760dcc49321956b623a705d4632e6b/univariate-live-demo/image.webp
--------------------------------------------------------------------------------
/univariate-live-demo/requirements.txt:
--------------------------------------------------------------------------------
1 | adal==1.2.7
2 | azure-ai-anomalydetector==3.0.0b5
3 | azure-core==1.24.1
4 | azureml-core==1.41.0
5 | black
7 | bokeh==2.4.3
8 | ipykernel==6.15.0
9 | ipympl
10 | jupyter
11 | jupyter_bokeh
12 | jupyter_contrib_nbextensions
13 | jupyterlab
14 | numpy
15 | pandas
16 | pip==23.3
17 | pydantic
18 | python-dateutil==2.8.2
19 | requests
20 | tqdm
21 | typing-extensions>=3.10.0.0
--------------------------------------------------------------------------------
/univariate-live-demo/utils.py:
--------------------------------------------------------------------------------
1 | # -------------------------------------------------------------
2 | #
3 | # Copyright (c) Microsoft Corporation. All rights reserved.
4 | #
5 | # -------------------------------------------------------------
6 |
7 | import json
8 | import logging
9 | import os
10 | import re
11 | from datetime import datetime
12 | from typing import ClassVar, Optional, Union
13 |
14 | from pydantic import BaseModel
15 |
16 | from azure.ai.anomalydetector import AnomalyDetectorClient
17 | from azure.ai.anomalydetector.models import DetectRequest, TimeGranularity
18 | from azure.core.credentials import AzureKeyCredential
19 | from azure.core.exceptions import HttpResponseError
20 |
21 |
22 | logger = logging.getLogger(__name__)
23 |
24 |
25 | class ADTimeSeries:
26 | """Class to read, format, and validate the data for the anomaly detector service.
27 |
28 | Note: the format of the data must match the following:
29 | data = {
30 | "period": 2 # OPTIONAL: Specifying this value can reduce anomaly detection latency by up to 50%. The
31 | period is an integer that specifies roughly how many data points the time series takes to repeat a pattern
32 | "series": [ # REQUIRED: list containing Time series data points. This should be sorted by timestamp in
33 | # ascending order to match the anomaly detection result. If the data is not sorted correctly or
34 | # there is a duplicate timestamp, the API will not work. In such case, an error message will be
35 | # returned. Each data point must have at least a "timestamp" in iso-8601 format and a "value"
36 | {"timestamp": "2014-12-07T21:00:00Z", "value": 7290.0979278853},
37 | {"timestamp": "2014-12-07T22:00:00Z", "value": 7884.4973233823}
38 | ],
39 | "granularity": "hourly", # REQUIRED: The sampling rate of the data. Must contain one of the granularities
40 | listed below.
41 | "sensitivity": 99 # OPTIONAL: This advanced model parameter in an integer between 0-99 that represents
42 | the sensitivity of the AD algorithm. The lower the value, the larger the margin will be, meaning
43 | fewer anomalies will be detected.
44 | "maxAnomalyRatio": 0.25 # OPTIONAL: This is an advanced model parameter is a float between 0 and 1
45 | representing the maximum anomaly ratio.
46 | "customInterval": is optional and used to set non-standard time interval. For example, if the series is
47 | 5 minutes, the request can be set as: {"granularity":"minutely", "customInterval":5}
48 | }
49 | """
50 |
51 | GRANULARITIES: ClassVar[dict] = {
52 | "yearly": TimeGranularity.YEARLY,
53 | "monthly": TimeGranularity.MONTHLY,
54 | "weekly": TimeGranularity.WEEKLY,
55 | "daily": TimeGranularity.DAILY,
56 | "hourly": TimeGranularity.HOURLY,
57 | "minutely": TimeGranularity.PER_MINUTE,
58 | "secondly": TimeGranularity.PER_SECOND,
59 | }
60 |
61 | def __init__(self, data: Union[dict, None] = None):
62 | self.data = data
63 | self.validated = False
64 |
65 | def validate(self):
66 | """Validates that self.data is in the correct format for the anomaly detector service.
67 |
68 | For documentation on the correct format, see: https://docs.microsoft.com/en-us/azure/
69 | cognitive-services/anomaly-detector/concepts/anomaly-detection-best-practices#data-preparation
70 |
71 | Once validation is done, the self.validated flag is set to True.
72 | """
73 | try:
74 | # Series:
75 | series = self.data.get("series", None)
76 | series_len = len(series) if series else 0
77 | if series_len < 12 or series_len > 8640:
78 | raise AssertionError(
79 | f"The length of series must be in the range [12-8640] but is {series_len}"
80 | )
81 | utc_pattern = re.compile(
82 | r"\b[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z\b"
83 | ) # ref: https://stackoverflow.com/questions/25568134/regex-to-verify-utc-date-time-format
84 |
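# Track the previous timestamp so out-of-order points can be detected and sorted below.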
85 | timestamp = None
86 | sort_datapoints = False
87 |
88 | for entry in self.data["series"]:
89 | # Validate timestamp format, and make sure the entries are sorted in ascending order
90 | entry_dict = entry.dict() if isinstance(entry, BaseModel) else entry
91 | current_timestamp = datetime.strptime(
92 | entry["timestamp"], "%Y-%m-%dT%H:%M:%SZ"
93 | )
94 |
95 | # Check if datapoints are sorted correctly:
96 | if timestamp is not None:
97 | if current_timestamp < timestamp:
98 | sort_datapoints = True
99 | logger.debug(
100 | "Datapoints are not in ascending order! Will sort points in ascending order.. "
101 | )
102 |
103 | timestamp = current_timestamp
104 | if not utc_pattern.match(entry_dict["timestamp"]):
105 | raise AssertionError("timestamp data does not match UTC format.")
106 | if "value" not in entry_dict:
107 | raise AssertionError("data['series'] entry is missing value field.")
108 | if not isinstance(entry_dict["value"], float):
109 | raise AssertionError("'value' should be of type float.")
110 |
111 | if sort_datapoints:
112 | self.data["series"].sort(key=lambda x: x["timestamp"], reverse=False)
113 |
114 | # Granularity:
115 | if "granularity" not in self.data: # Required field
116 | raise AssertionError("self.data missing required 'granularity' field.")
117 | self.data["granularity"] = self.data["granularity"].lower()
118 | if self.data["granularity"] not in list(ADTimeSeries.GRANULARITIES):
119 | raise AssertionError(
120 | "granularity value is not one of those listed in ADTimeSeries.GRANULARITIES."
121 | )
122 |
123 | # Custom interval:
124 | if "customInterval" not in self.data:
125 | # 'custom_interval' is optional and used to set non-standard time interval.
126 | # For example, if the series is 5 minutes, the request can be set as:
127 | # {"granularity":"minutely", "customInterval":5}
128 | self.data["customInterval"] = 1
129 |
130 | except AssertionError as err:
131 | logger.error(err, exc_info=True)
132 | raise AssertionError(err) from err
133 |
134 | self.validated = True
135 |
136 | @classmethod
137 | def from_json_path(cls, filepath: str):
138 | """Read data from a JSON file.
139 |
140 | Args:
141 | filepath (str): Path to a JSON file containing the data.
142 |
143 | Returns:
144 | (AnomalyDetectorSeries): an AnomalyDetectorSeries object containing the data.
145 | """
146 | with open(filepath, encoding="utf-8") as file_handle:
147 | data = cls(json.load(file_handle))
148 | data.validate()
149 | return data
150 |
151 |
152 | class UnivariateAnomalyDetector:
153 | """Class to interact with the Azure Anomaly Detector Service."""
154 |
155 | def __init__(self, key: Optional[str] = None, endpoint: Optional[str] = None):
156 | self.key = key
157 | self.endpoint = endpoint
158 | self.client: Optional[AnomalyDetectorClient] = None
159 | logger.debug("Instantiated UnivariateAnomalyDetector object.")
160 |
161 | def connect(self):
162 | """Reads key and endpoint environment variables and creates AnomalyDetectorClient client object."""
163 | if self.key is None:
164 | logger.debug(
165 | "self.key is None -- reading the key from the environment variable."
166 | )
167 | self.key = os.getenv("ANOMALY_DETECTOR_API_KEY", default=None)
168 | if self.endpoint is None:
169 | logger.debug(
170 | "self.endpoint is None -- reading the endpoint from the environment variable."
171 | )
172 | self.endpoint = os.getenv("ANOMALY_DETECTOR_ENDPOINT", default=None)
173 |
174 | if self.key and self.endpoint:
175 | self.client = AnomalyDetectorClient(
176 | AzureKeyCredential(self.key), self.endpoint
177 | )
178 | logger.info("Successfully instantiated AnomalyDetectorClient object.")
179 | else:
180 | msg = "The key or the endpoint for the Anomaly Detector resource is missing."
181 | logger.error(msg)
182 | raise ValueError(msg)
183 |
184 | def detect_anomaly(
185 | self, mode: str, data_dict: Union[dict, ADTimeSeries]
186 | ) -> Optional[dict]:
187 | """Create and send a request to the Anomaly Detector API.
188 |
189 | Args:
190 | mode (str): The API accepts three modes of detection: 'entire', 'last', 'change'.
191 | data_dict (Union[dict, ADTimeSeries]): a dictionary or a ADTimeSeries containing the time
192 | series data.
193 |
194 | Returns:
195 | dict: the result from the anomaly detector API call. The fields in this result depend on the mode:
196 |
197 | - 'entire' mode: ['period','expected_values', 'upper_margins', 'lower_margins', 'is_anomaly',
198 | 'is_negative_anomaly','is_positive_anomaly']
199 |
200 | - 'last' mode: ['period', 'suggested_window', 'expected_value', 'upper_margin', 'lower_margin',
201 | 'is_anomaly', 'is_negative_anomaly', 'is_positive_anomaly']
202 |
203 | - 'change' mode: ['period', 'is_change_point', 'confidence_scores']
204 |
205 | """
206 |
207 | if self.client is None:
208 | logger.debug("self.client is None -- Calling self.connect()..")
209 | self.connect()
210 |
211 | if isinstance(data_dict, ADTimeSeries):
212 | if not data_dict.validated: # Make sure the data is valid.
213 | raise AssertionError("data_dict is not validated.")
214 | data_dict = (
215 | data_dict.data
216 | ) # Extract the data, we don't need ADTimeSeries anymore.
217 |
218 | # Create the request:
219 | request = DetectRequest(
220 | series=data_dict["series"], granularity=data_dict["granularity"]
221 | )
222 |
223 | try:
224 | if mode == "entire":
225 | logger.debug("Calling Anomaly Detector API in entire data series mode.")
226 | return self.client.detect_entire_series(request).as_dict()
227 |
228 | if mode == "last":
229 | logger.debug("Calling Anomaly Detector API in last point mode.")
230 | return self.client.detect_last_point(request).as_dict()
231 |
232 | if mode == "change":
233 | logger.debug(
234 | "Calling Anomaly Detector API in detect change point mode."
235 | )
236 | return self.client.detect_change_point(request).as_dict()
237 |
238 | err_message = "Unknown anomaly mode: " + mode
239 | logger.error(err_message)
240 | raise ValueError(err_message)
241 |
242 | except HttpResponseError as err:
243 | logger.error(err, exc_info=True)
244 | raise
245 |
--------------------------------------------------------------------------------