├── .DS_Store
├── .github
├── ISSUE_TEMPLATE.md
└── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE.md
├── README.md
├── Tutorials
├── Bing-Image-Search
│ ├── README.md
│ ├── app.js
│ ├── package.json
│ └── public
│ │ ├── css
│ │ └── style.css
│ │ ├── index.html
│ │ └── js
│ │ └── script.js
├── Bing-Visual-Search
│ ├── BingVisualSearchApp.html
│ ├── BingVisualSearchCropImage.cs
│ ├── BingVisualSearchInsightsTokens.cs
│ ├── BingVisualSearchUploadImage.html
│ └── cognitive-services-bing-news-search-signup-requirements.md
├── Bing-Web-Search
│ ├── README.md
│ ├── bing-web-search.js
│ ├── package.json
│ └── public
│ │ ├── css
│ │ └── styles.css
│ │ ├── index.html
│ │ └── js
│ │ └── script.js
├── BingAutosuggestApp.html
├── BingEntitySearchApp.html
├── BingGetSimilarImages.cs
├── BingImageSearchApp.html
├── BingNewsSearchApp.html
├── BingSpellCheckApp.html
└── BingVideoSearchApp.html
├── curl
├── Knowledge
│ └── QnA-Maker
│ │ └── query-endpoint.sh
├── Language
│ ├── native-document-pii.docx
│ ├── native-document-pii.pdf
│ ├── native-document-summarization.docx
│ └── native-document-summarization.pdf
├── Translator
│ ├── document-translation-sample.docx
│ ├── document-translation-sample.pdf
│ └── translate-with-glossary.json
└── form-recognizer
│ ├── 1040-training-data.zip
│ ├── 1099-training-data.zip
│ ├── DriverLicense.png
│ ├── IDResult-DriverLicense.json
│ ├── IDResult-Passport.json
│ ├── Invoice-6.pdf
│ ├── Invoice_1.pdf.labels.json
│ ├── Invoice_1.pdf.ocr.json
│ ├── Passport.png
│ ├── analyze-result-invoice-6.pdf.json
│ ├── barcodes.png
│ ├── business-card-english.jpg
│ ├── business-card-result.json
│ ├── businessCard.png
│ ├── consent-form-training-dataset.zip
│ ├── contoso-allinone.jpg
│ ├── contoso-receipt.png
│ ├── cosent-form-test-document.pdf
│ ├── covid-informed-consent-for-inactivated-immunization-universal-2020-v.3.-covid-screening-vaccine-questions.pdf
│ ├── custom-vaccine
│ └── after-uploading-files.png
│ ├── invoice-logic-apps-tutorial.pdf
│ ├── invoice_sample.jpg
│ ├── layout-page-001.jpg
│ ├── receipt-result.json
│ ├── rest-api
│ ├── business_card.jpg
│ ├── general_documents_sample_response.json
│ ├── identity_documents.png
│ ├── insurance-card.png
│ ├── invoice.pdf
│ ├── layout.png
│ ├── read.png
│ ├── receipt.png
│ └── w2.png
│ ├── restructure-power-bi.png
│ ├── restructure.png
│ ├── sample-invoice-output.json
│ ├── sample-invoice.pdf
│ ├── sample-layout-output.json
│ ├── sample-layout.pdf
│ ├── sample_data.zip
│ ├── sample_data_with_labels.zip
│ ├── sample_data_without_labels.zip
│ ├── simple-invoice.png
│ ├── w8-training-data.zip
│ └── w9-training-data.zip
├── dotnet
├── Language
│ ├── BingSpellCheckv7.cs
│ ├── README.md
│ └── TextAnalyticsSentiment.cs
├── Search
│ ├── BingAutosuggestv7.cs
│ ├── BingCustomSearchv7.cs
│ ├── BingEntitySearchv7.cs
│ ├── BingImageSearchInsights.cs
│ ├── BingImageSearchPostv7.cs
│ ├── BingImageSearchv7.cs
│ ├── BingImageSearchv7Quickstart.cs
│ ├── BingNewsSearchv7.cs
│ ├── BingSpellCheckv7.cs
│ ├── BingVideoSearchv7.cs
│ ├── BingVisualSearchv7.cs
│ ├── BingWebSearchv7.cs
│ └── README.MD
└── Vision
│ ├── ComputerVision
│ ├── AnalyzeImage
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.AnalyzeImage.csproj
│ │ └── Program.cs
│ ├── BatchReadFile
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.BatchReadFile.csproj
│ │ └── Program.cs
│ ├── ComputerVision.sln
│ ├── ComputerVisionSample
│ ├── DescribeImage
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.DescribeImage.csproj
│ │ └── Program.cs
│ ├── DetectObjects
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.DetectObjects.csproj
│ │ └── Program.cs
│ ├── GetAreaOfInterest
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.GetAreaOfInterest.csproj
│ │ └── Program.cs
│ ├── GetThumbnail
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.GetThumbnail.csproj
│ │ └── Program.cs
│ ├── OCR
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.OCR.csproj
│ │ └── Program.cs
│ ├── README.md
│ ├── RecognizeDomainSpecificContent
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.RecognizeDomainSpecificContent.csproj
│ │ └── Program.cs
│ ├── RecognizeText
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.RecognizeText.csproj
│ │ └── Program.cs
│ └── TagImage
│ │ ├── Microsoft.Azure.CognitiveServices.Samples.ComputerVision.TagImage.csproj
│ │ └── Program.cs
│ ├── Face
│ ├── Face1.0.cs
│ └── README.md
│ └── InkRecognition
│ ├── README.md
│ ├── quickstart
│ ├── example-ink-strokes.json
│ ├── example-response.json
│ └── recognizeInk.cs
│ ├── uwp-app
│ ├── CHANGELOG.md
│ ├── CONTRIBUTING.md
│ ├── LICENSE.md
│ ├── README.md
│ └── cs
│ │ ├── App.xaml
│ │ ├── App.xaml.cs
│ │ ├── Assets
│ │ ├── SplashScreen.scale-100.png
│ │ ├── SplashScreen.scale-125.png
│ │ ├── SplashScreen.scale-150.png
│ │ ├── SplashScreen.scale-200.png
│ │ └── SplashScreen.scale-400.png
│ │ ├── HTTP
│ │ └── HttpManager.cs
│ │ ├── JSON
│ │ ├── Converter
│ │ │ └── InkRecognitionResponseConverter.cs
│ │ ├── Format
│ │ │ ├── Alternates.cs
│ │ │ ├── HttpErrorDetails.cs
│ │ │ ├── InkBullet.cs
│ │ │ ├── InkDrawing.cs
│ │ │ ├── InkLine.cs
│ │ │ ├── InkListItem.cs
│ │ │ ├── InkParagraph.cs
│ │ │ ├── InkRecognitionRequest.cs
│ │ │ ├── InkRecognitionResponse.cs
│ │ │ ├── InkRecognitionUnit.cs
│ │ │ ├── InkRecognizerStroke.cs
│ │ │ ├── InkWord.cs
│ │ │ ├── InkWritingRegion.cs
│ │ │ ├── PointDetailsPattern.cs
│ │ │ ├── RecognitionUnitKind.cs
│ │ │ └── Rectangle.cs
│ │ └── JSONProcessor.cs
│ │ ├── NoteTaker.csproj
│ │ ├── NoteTaker.sln
│ │ ├── NoteTaker.xaml
│ │ ├── NoteTaker.xaml.cs
│ │ ├── OutputWriter.cs
│ │ ├── Package.appxmanifest
│ │ ├── Properties
│ │ ├── AssemblyInfo.cs
│ │ └── Default.rd.xml
│ │ └── Services
│ │ └── Ink
│ │ ├── InkRecognitionRoot.cs
│ │ └── InkRecognizer.cs
│ └── wpf-app
│ ├── CHANGELOG.md
│ ├── README.md
│ └── src
│ ├── App.config
│ ├── App.xaml
│ ├── App.xaml.cs
│ ├── HTTP
│ └── HttpManager.cs
│ ├── JSON
│ ├── Converter
│ │ └── InkRecognitionResponseConverter.cs
│ ├── Format
│ │ ├── Alternates.cs
│ │ ├── HttpErrorDetails.cs
│ │ ├── InkBullet.cs
│ │ ├── InkDrawing.cs
│ │ ├── InkLine.cs
│ │ ├── InkListItem.cs
│ │ ├── InkParagraph.cs
│ │ ├── InkRecognitionRequest.cs
│ │ ├── InkRecognitionResponse.cs
│ │ ├── InkRecognitionUnit.cs
│ │ ├── InkRecognizerStroke.cs
│ │ ├── InkWord.cs
│ │ ├── InkWritingRegion.cs
│ │ ├── PointDetailsPattern.cs
│ │ ├── RecognitionUnitKind.cs
│ │ └── Rectangle.cs
│ └── JSONProcessor.cs
│ ├── MainWindow.xaml
│ ├── MainWindow.xaml.cs
│ ├── NoteTaker.csproj
│ ├── NoteTaker.sln
│ ├── OutputWriter.cs
│ ├── Properties
│ ├── AssemblyInfo.cs
│ ├── Resources.Designer.cs
│ ├── Resources.resx
│ ├── Settings.Designer.cs
│ └── Settings.settings
│ ├── Services
│ └── Ink
│ │ ├── InkRecognitionRoot.cs
│ │ └── InkRecognizer.cs
│ ├── app.manifest
│ └── packages.config
├── go
└── Search
│ └── BingWebSearchv7.go
├── java
├── InkRecognition
│ ├── README.md
│ ├── android-sample-app
│ │ ├── README.md
│ │ └── Recognizer
│ │ │ ├── .gitignore
│ │ │ ├── app
│ │ │ ├── .gitignore
│ │ │ ├── build.gradle
│ │ │ ├── proguard-rules.pro
│ │ │ └── src
│ │ │ │ └── main
│ │ │ │ ├── AndroidManifest.xml
│ │ │ │ ├── java
│ │ │ │ └── CognitiveServices
│ │ │ │ │ └── Ink
│ │ │ │ │ └── Recognition
│ │ │ │ │ ├── InkBullet.java
│ │ │ │ │ ├── InkDrawing.java
│ │ │ │ │ ├── InkLine.java
│ │ │ │ │ ├── InkListItem.java
│ │ │ │ │ ├── InkParagraph.java
│ │ │ │ │ ├── InkPoint.java
│ │ │ │ │ ├── InkRecognitionDetailsLogger.java
│ │ │ │ │ ├── InkRecognitionError.java
│ │ │ │ │ ├── InkRecognitionUnit.java
│ │ │ │ │ ├── InkRecognitionUnitCategory.java
│ │ │ │ │ ├── InkRecognizer.java
│ │ │ │ │ ├── InkRoot.java
│ │ │ │ │ ├── InkStroke.java
│ │ │ │ │ ├── InkWord.java
│ │ │ │ │ ├── InkWritingRegion.java
│ │ │ │ │ ├── MainActivity.java
│ │ │ │ │ ├── NoteTaker.java
│ │ │ │ │ ├── RecognitionResultStatus.java
│ │ │ │ │ ├── Rectangle.java
│ │ │ │ │ ├── Shape.java
│ │ │ │ │ └── StrokeKind.java
│ │ │ │ └── res
│ │ │ │ ├── drawable-v24
│ │ │ │ └── ic_launcher_foreground.xml
│ │ │ │ ├── drawable
│ │ │ │ └── ic_launcher_background.xml
│ │ │ │ ├── mipmap-anydpi-v26
│ │ │ │ ├── ic_launcher.xml
│ │ │ │ └── ic_launcher_round.xml
│ │ │ │ ├── mipmap-hdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ │ ├── mipmap-mdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ │ ├── mipmap-xhdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ │ ├── mipmap-xxhdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ │ ├── mipmap-xxxhdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ │ ├── values
│ │ │ │ ├── colors.xml
│ │ │ │ ├── dimens.xml
│ │ │ │ ├── strings.xml
│ │ │ │ └── styles.xml
│ │ │ │ └── xml
│ │ │ │ └── backup_descriptor.xml
│ │ │ ├── build.gradle
│ │ │ ├── gradle.properties
│ │ │ ├── gradle
│ │ │ └── wrapper
│ │ │ │ ├── gradle-wrapper.jar
│ │ │ │ └── gradle-wrapper.properties
│ │ │ ├── gradlew
│ │ │ ├── gradlew.bat
│ │ │ └── settings.gradle
│ └── quickstart
│ │ ├── RecognizeInk.java
│ │ ├── example-ink-strokes.json
│ │ └── example-response.json
├── Language
│ ├── BingSpellCheck.java
│ ├── README.md
│ └── TextAnalytics
│ │ ├── .classpath
│ │ ├── .factorypath
│ │ ├── .project
│ │ ├── .settings
│ │ ├── org.eclipse.jdt.apt.core.prefs
│ │ ├── org.eclipse.jdt.core.prefs
│ │ └── org.eclipse.m2e.core.prefs
│ │ ├── .vscode
│ │ └── launch.json
│ │ ├── README.md
│ │ ├── pom.xml
│ │ ├── src
│ │ └── main
│ │ │ └── java
│ │ │ └── com
│ │ │ └── microsoft
│ │ │ └── azure
│ │ │ └── textanalytics
│ │ │ └── samples
│ │ │ └── TextAnalytics.java
│ │ └── target
│ │ └── classes
│ │ └── com
│ │ └── microsoft
│ │ └── azure
│ │ └── textanalytics
│ │ └── samples
│ │ ├── Document.class
│ │ ├── Documents.class
│ │ ├── EntityRecognition.class
│ │ ├── KeyPhraseExtraction.class
│ │ ├── LanguageDetection.class
│ │ ├── SentimentAnalysis.class
│ │ └── TextAnalytics.class
├── Search
│ ├── BingAutosuggestv7.java
│ ├── BingCustomSearchv7.java
│ ├── BingEntitySearchv7.java
│ ├── BingImageSearchv7.java
│ ├── BingImageSearchv7Quickstart.java
│ ├── BingNewsSearchv7.java
│ ├── BingSpellCheck.java
│ ├── BingVideoSearchv7.java
│ ├── BingVisualSearchv7.java
│ ├── BingWebSearchv7.java
│ └── README.md
└── Vision
│ ├── ComputerVision2.1.java
│ ├── Face1.0.java
│ └── README.md
├── javascript
└── InkRecognition
│ ├── javascript-app
│ ├── .gitignore
│ ├── LICENSE.md
│ ├── README.md
│ └── src
│ │ ├── InkAnalysis.js
│ │ ├── config.js
│ │ ├── inkHelper.js
│ │ └── sample.html
│ └── quickstart
│ ├── example-ink-strokes.json
│ ├── example-response.json
│ └── recognize-ink.html
├── nodejs
├── Language
│ ├── BingSpellCheckv7.js
│ └── README.md
├── Search
│ ├── BingAutosuggestv7.js
│ ├── BingCustomSearchv7.js
│ ├── BingEntitySearchv7.js
│ ├── BingImageSearchv7.js
│ ├── BingImageSearchv7Quickstart.js
│ ├── BingNewsSearchv7.js
│ ├── BingSpellCheckv7.js
│ ├── BingVideoSearchv7.js
│ ├── BingVisualSearchv7.js
│ ├── BingWebSearchv7.js
│ └── README.md
└── Vision
│ ├── AnalyzeRemoteImage.js
│ ├── ComputerVisionOCR.js
│ ├── FaceDetectFaces.js
│ └── README.md
├── php
└── Search
│ └── BingWebSearchv7.php
├── python
├── Language
│ ├── BingSpellCheckv7.py
│ └── README.md
├── Search
│ ├── BingAutosuggestv7.py
│ ├── BingCustomSearchv7.py
│ ├── BingEntitySearchv7.py
│ ├── BingImageSearchv7.py
│ ├── BingNewsSearchv7.py
│ ├── BingSpellCheckv7.py
│ ├── BingVideoSearchv7.py
│ ├── BingVisualSearchv7.py
│ ├── BingWebSearchv7.py
│ └── README.md
└── Vision
│ ├── ComputerVision2.1.py
│ ├── Face1.0.py
│ └── README.md
├── ruby
└── Search
│ ├── BingAutosuggestv7.rb
│ ├── BingEntitySearchv7.rb
│ ├── BingImageSearchv7.rb
│ ├── BingNewsSearchv7.rb
│ ├── BingSpellCheckv7.rb
│ ├── BingVideoSearchv7.rb
│ └── BingWebSearchv7.rb
└── swift
└── InkRecognition
├── CONTRIBUTING.md
├── README.md
└── Recognizer
├── Recognizer.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ ├── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
│ └── xcuserdata
│ │ └── elijah.xcuserdatad
│ │ └── UserInterfaceState.xcuserstate
└── xcuserdata
│ └── elijah.xcuserdatad
│ └── xcschemes
│ └── xcschememanagement.plist
└── Recognizer
├── AppDelegate.swift
├── Assets.xcassets
├── AppIcon.appiconset
│ └── Contents.json
└── Contents.json
├── Base.lproj
├── LaunchScreen.storyboard
└── Main.storyboard
├── Info.plist
├── InkBullet.swift
├── InkDrawing.swift
├── InkLine.swift
├── InkListItem.swift
├── InkParagraph.swift
├── InkPoint.swift
├── InkRecognitionUnit.swift
├── InkRecognizer.swift
├── InkRendererView.swift
├── InkRoot.swift
├── InkStroke.swift
├── InkWord.swift
├── InkWritingRegion.swift
├── Line.swift
└── ViewController.swift
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/.DS_Store
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
4 | > Please provide us with the following information:
5 | > ---------------------------------------------------------------
6 |
7 | ### This issue is for a: (mark with an `x`)
8 | ```
9 | - [ ] bug report -> please search issues before submitting
10 | - [ ] feature request
11 | - [ ] documentation issue or request
12 | - [ ] regression (a behavior that used to work and stopped in a new release)
13 | ```
14 |
15 | ### Minimal steps to reproduce
16 | >
17 |
18 | ### Any log messages given by the failure
19 | >
20 |
21 | ### Expected/desired behavior
22 | >
23 |
24 | ### OS and Version?
25 | > Windows 7, 8 or 10. Linux (which distribution). macOS (Yosemite? El Capitan? Sierra?)
26 |
27 | ### Versions
28 | >
29 |
30 | ### Mention any other details that might be useful
31 |
32 | > ---------------------------------------------------------------
33 | > Thanks! We'll be in touch soon.
34 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Purpose
2 |
3 | * ...
4 |
5 | ## Does this introduce a breaking change?
6 |
7 | ```
8 | [ ] Yes
9 | [ ] No
10 | ```
11 |
12 | ## Pull Request Type
13 | What kind of change does this Pull Request introduce?
14 |
15 |
16 | ```
17 | [ ] Bugfix
18 | [ ] Feature
19 | [ ] Code style update (formatting, local variables)
20 | [ ] Refactoring (no functional changes, no api changes)
21 | [ ] Documentation content changes
22 | [ ] Other... Please describe:
23 | ```
24 |
25 | ## How to Test
26 | * Get the code
27 |
28 | ```
29 | git clone [repo-address]
30 | cd [repo-name]
31 | git checkout [branch-name]
32 | npm install
33 | ```
34 |
35 | * Test the code
36 |
37 | ```
38 | ```
39 |
40 | ## What to Check
41 | Verify that the following are valid
42 | * ...
43 |
44 | ## Other Information
45 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [project-title] Changelog
2 |
3 |
4 | # x.y.z (yyyy-mm-dd)
5 |
6 | *Features*
7 | * ...
8 |
9 | *Bug Fixes*
10 | * ...
11 |
12 | *Breaking Changes*
13 | * ...
14 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation. All rights reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE
22 |
23 | Third Party Programs: The software may include third party programs that Microsoft,
24 | not the third party, licenses to you under this agreement. Notices, if any, for the
25 | third party programs are included for your information only.
26 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Image-Search/README.md:
--------------------------------------------------------------------------------
1 | # Bing Image Search single-page web app
2 |
3 | This single-page web application demonstrates how the Bing Image Search API (an [Azure Cognitive Service](https://docs.microsoft.com/azure/cognitive-services/)) can be used to retrieve, parse, and display relevant image results based on a user's query. This sample complements the [single-page web app](https://docs.microsoft.com/azure/cognitive-services/bing-image-search/tutorial-bing-image-search-single-page-app) tutorial on docs.microsoft.com.
4 |
5 | The sample app can:
6 |
7 | * Call the Bing Image Search API with search options
8 | * Display image results
9 | * Paginate results
10 | * Manage subscription keys
11 | * Handle errors
12 |
13 | To use this app, you must have a valid Azure subscription. If you don't have one, you can visit [the Microsoft Cognitive Services Web site](https://azure.microsoft.com/free/cognitive-services/), create a new Azure account, and try Cognitive Services for free. After you sign up, you can use the subscription key from creating an [Azure Cognitive Services resource](http://docs.microsoft.com/azure/cognitive-services/cognitive-services-apis-create-account) for the Bing Search APIs.
14 |
15 | ## Prerequisites
16 |
17 | * Node.js 8 or later
18 | * A valid Azure Cognitive Services subscription key for the Bing Search APIs
19 |
20 | ## Get started
21 |
22 | 1. Clone the repository.
23 | 2. Navigate to the Bing Image Search Tutorial directory.
24 | 3. Install Express.js:
25 | `npm install`
26 |
27 | 4. Run the sample app:
28 | `node app.js`
29 |
30 | 5. Navigate to the provided URL and perform your first Bing Image Search!
31 |
32 | ## Next steps
33 |
34 | * See the [single-page webapp tutorial](https://docs.microsoft.com/azure/cognitive-services/bing-image-search/tutorial-bing-image-search-single-page-app) that goes along with this sample.
35 | * Explore all of the available [Azure Cognitive Services](https://docs.microsoft.com/azure/cognitive-services/).
36 | * Use [computer vision](https://docs.microsoft.com/azure/cognitive-services/computer-vision/quickstarts-sdk/csharp-analyze-sdk) to quickly analyze an image.
37 | * View the rest of the [Bing Image Search Documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-image-search/).
38 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Image-Search/app.js:
--------------------------------------------------------------------------------
1 | const http = require('http');
2 | const express = require("express");
3 | const app = express();
4 | var port = 8000;
5 |
6 | app.use(express.static("public"));
7 |
8 | app.listen(port, function () {
9 | console.log('Starting the Node.js server for this sample. Navigate to http://localhost:'+port+'/ to view the webpage.');
10 | });
--------------------------------------------------------------------------------
/Tutorials/Bing-Image-Search/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "BingImageSearchApp",
3 | "version": "1.0.0",
4 | "description": "A single-page web app that displays images from the Bing Image Search API ",
5 | "main": "app.js",
6 | "license": "MIT",
7 | "scripts": {
8 | "test": "echo \"Error: no test specified\" && exit 1"
9 | },
10 | "author": "Microsoft Corporation",
11 | "dependencies": {
12 | "express": "^4.16.3"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Image-Search/public/css/style.css:
--------------------------------------------------------------------------------
1 |
2 | html, body, div, p, h1, h2 {font-family: Verdana, "Lucida Sans", sans-serif; color: #000;}
3 | html, body, div, p {font-weight: normal;}
4 | body {background-color: #fff;}
5 |
6 | h1, h2, h3 {font-weight: bold; color: #087;}
7 | sup {font-weight: normal;}
8 |
9 | html, body, div, p {font-size: 12px;}
10 | h1 {font-size: 20px; margin-top: 30px;}
11 | h2 {font-size: 16px; clear: left;}
12 | h3 {font-size: 14px; clear: left;}
13 |
14 | #sidebar {font-size: 10px; text-align: right; display: flex; padding: 0px 10px;
15 | float: right; margin-left: 15px; margin-right: 0px;}
16 | #sidebar p {margin-top: 0px; font-size: 10px;}
17 | #sidebar img {display: inline-block; float: none; padding-right: 0px;}
18 | #pole p {font-size: 14px;}
19 | #pole, #mainline, #json, #http, #sidebar, #error, #paging1, #paging2
20 | {display: none;}
21 |
22 | #term {width: 100%;}
23 | #logo {padding: 15px; float: right; border-left: 2px solid #ccc;}
24 | #query {float: left;}
25 |
26 | img {vertical-align: top; float: left; margin-right: 10px; margin-bottom: 10px;}
27 | p.images {display: inline-block; font-size: 9px; vertical-align: top;}
28 | p.images img {float: none;}
29 | p.relatedSearches {clear: none;}
30 | #logo p, p.news, p.webPages, p.images {clear: left;}
31 |
32 | a[href="#"]:link {color: blue;}
33 | a[href="#"]:visited {color: blue;}
34 |
35 | h3 a:visited {color: #087 !important;}
36 | h3 a:link {color: #087 !important;}
37 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Visual-Search/cognitive-services-bing-news-search-signup-requirements.md:
--------------------------------------------------------------------------------
1 | ---
2 | author: aahill
3 | ms.author: aahi
4 | ms.service: cognitive-services
5 | ms.topic: include
6 | ms.date: 08/08/2018
7 | ---
8 |
9 | You must have a [Cognitive Services API account](https://docs.microsoft.com/azure/cognitive-services/cognitive-services-apis-create-account) with access to the Bing Search APIs. If you don't have an Azure subscription, you can visit [the Microsoft Cognitive Services Web site](https://azure.microsoft.com/free/cognitive-services/), create a new Azure account, and try Cognitive Services for free. Before continuing, you will need the access key provided after activating your free trial, or a paid subscription key from your Azure dashboard.
10 |
11 | To start a subscription in Azure portal:
12 | 1. Enter 'BingSearchV7' in the text box at the top of the Azure portal that says `Search resources, services, and docs`.
13 | 2. Under Marketplace in the drop-down list, select `Bing Search v7`.
14 | 3. Enter `Name` for the new resource.
15 | 4. Select `Pay-As-You-Go` subscription.
16 | 5. Select `S9` pricing tier.
17 | 6. Click `Enable` to start the subscription.
18 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Web-Search/README.md:
--------------------------------------------------------------------------------
1 | # Bing Web Search single-page app
2 |
3 | This single-page app demonstrates how the Bing Web Search API can be used to retrieve, parse, and display relevant search results based on a user's query. HTML, CSS, and JS files are included. Express.js is the only dependency.
4 |
5 | The sample app can:
6 |
7 | * Call the Bing Web Search API with search options
8 | * Display web, image, news, and video results
9 | * Paginate results
10 | * Manage subscription keys
11 | * Handle errors
12 |
13 | To use this app, an [Azure Cognitive Services account](https://docs.microsoft.com/azure/cognitive-services/cognitive-services-apis-create-account) with Bing Search APIs is required. If you don't have an account, you can visit [the Microsoft Cognitive Services Web site](https://azure.microsoft.com/free/cognitive-services/), create a new Azure account, and try Cognitive Services for free.
14 |
15 | ## Prerequisites
16 |
17 | Here are a few things that you'll need to run the app:
18 |
19 | * Node.js 8 or later
20 | * A subscription key
21 |
22 | ## Get started
23 |
24 | 1. Clone the repository.
25 | 2. Navigate to the Bing Web Search Tutorial directory.
26 | 3. Install dependencies:
27 | ```
28 | npm install
29 | ```
30 | 4. Run the sample app:
31 | ```
32 | node bing-web-search.js
33 | ```
34 | 5. Navigate to the provided URL and perform your first Bing Web Search!
35 |
36 | ## Next steps
37 |
38 | Learn how the app works with the [single-page web app tutorial](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-web-search/tutorial-bing-web-search-single-page-app).
39 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Web-Search/bing-web-search.js:
--------------------------------------------------------------------------------
1 | const https = require('https');
2 | const express = require("express");
3 | const app = express();
4 | var port = 8000;
5 |
6 | app.use(express.static("public"));
7 |
8 | app.listen(port, () => console.log('Your app is ready! Navigate to: http://localhost:' + port + '/.'));
9 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Web-Search/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "bing-web-search",
3 | "version": "1.0.0",
4 | "description": "A single-page web app that displays Bing Web Search results, including pages, images, news articles, and video thumbnails.",
5 | "main": "bing-web-search.js",
6 | "dependencies": {
7 | "express": "^4.16.3"
8 | },
9 | "devDependencies": {},
10 | "scripts": {
11 | "test": "node bing-web-search.js"
12 | },
13 | "author": "Microsoft Corporation"
14 | }
15 |
--------------------------------------------------------------------------------
/Tutorials/Bing-Web-Search/public/css/styles.css:
--------------------------------------------------------------------------------
1 | html, body, div, p, h1, h2 {font-family: Verdana, "Lucida Sans", sans-serif; color: #000;}
2 | html, body, div, p {font-weight: normal;}
3 | body {background-color: #fff;}
4 |
5 | h1, h2, h3 {font-weight: bold; color: #087;}
6 | sup {font-weight: normal;}
7 |
8 | html, body, div, p {font-size: 12px;}
9 | h1 {font-size: 20px; margin-top: 30px;}
10 | h2 {font-size: 16px; clear: left;}
11 | h3 {font-size: 14px; clear: left;}
12 |
13 | #sidebar {font-size: 10px; text-align: right; display: flex; padding: 0px 10px;
14 | float: right; margin-left: 15px; margin-right: 0px;}
15 | #sidebar p {margin-top: 0px; font-size: 10px;}
16 | #sidebar img {display: inline-block; float: none; padding-right: 0px;}
17 | #pole p {font-size: 14px;}
18 | #pole, #mainline, #json, #http, #sidebar, #error, #paging1, #paging2
19 | {display: none;}
20 |
21 | #term {width: 100%;}
22 | #logo {padding: 15px; float: right; border-left: 2px solid #ccc;}
23 | #query {float: left;}
24 |
25 | img {vertical-align: top; float: left; margin-right: 10px; margin-bottom: 10px;}
26 | p.images img {float: none;}
27 | p.relatedSearches {clear: none;}
28 | #logo p, p.news, p.webPages, p.images {clear: left;}
29 |
30 | a[href="#"]:link {color: blue;}
31 | a[href="#"]:visited {color: blue;}
32 |
33 | h3 a:visited {color: #087 !important;}
34 | h3 a:link {color: #087 !important;}
35 |
--------------------------------------------------------------------------------
/curl/Knowledge/QnA-Maker/query-endpoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # all information for your knowledgebase is found at
4 | # https://www.qnamaker.ai/Publish?kbId=your-kb-id
5 | #
6 | # replace values:
7 | # xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx = azure resource's key
8 | # yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy = your kb id
9 | # myazureresourcename = QnA Maker resource name you used in Azure
10 | # your-question = question submitted from user
11 |
12 |
13 | curl \
14 | --header "Content-type: application/json" \
15 | --header "Authorization: EndpointKey xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" \
16 | --request POST \
17 | --data '{"question":"your-question"}' \
18 | https://myazureresourcename.azurewebsites.net/qnamaker/knowledgebases/yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy/generateAnswer
--------------------------------------------------------------------------------
/curl/Language/native-document-pii.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/Language/native-document-pii.docx
--------------------------------------------------------------------------------
/curl/Language/native-document-pii.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/Language/native-document-pii.pdf
--------------------------------------------------------------------------------
/curl/Language/native-document-summarization.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/Language/native-document-summarization.docx
--------------------------------------------------------------------------------
/curl/Language/native-document-summarization.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/Language/native-document-summarization.pdf
--------------------------------------------------------------------------------
/curl/Translator/document-translation-sample.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/Translator/document-translation-sample.docx
--------------------------------------------------------------------------------
/curl/Translator/document-translation-sample.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/Translator/document-translation-sample.pdf
--------------------------------------------------------------------------------
/curl/Translator/translate-with-glossary.json:
--------------------------------------------------------------------------------
1 | {
2 | "inputs": [
3 | {
4 | "source": {
5 | "sourceUrl": "https://my.blob.core.windows.net/source-en"
6 | },
7 | "targets": [
8 | {
9 | "targetUrl": "https://my.blob.core.windows.net/target-fr",
10 | "language": "fr",
11 | "glossaries": [
12 | {
13 | "glossaryUrl": "https://my.blob.core.windows.net/glossaries/en-fr.tsv",
14 | "format": "tsv"
15 | }
16 | ]
17 |
18 | }
19 | ]
20 | }
21 | ]
22 | }
23 |
--------------------------------------------------------------------------------
/curl/form-recognizer/1040-training-data.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/1040-training-data.zip
--------------------------------------------------------------------------------
/curl/form-recognizer/1099-training-data.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/1099-training-data.zip
--------------------------------------------------------------------------------
/curl/form-recognizer/DriverLicense.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/DriverLicense.png
--------------------------------------------------------------------------------
/curl/form-recognizer/Invoice-6.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/Invoice-6.pdf
--------------------------------------------------------------------------------
/curl/form-recognizer/Passport.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/Passport.png
--------------------------------------------------------------------------------
/curl/form-recognizer/barcodes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/barcodes.png
--------------------------------------------------------------------------------
/curl/form-recognizer/business-card-english.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/business-card-english.jpg
--------------------------------------------------------------------------------
/curl/form-recognizer/businessCard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/businessCard.png
--------------------------------------------------------------------------------
/curl/form-recognizer/consent-form-training-dataset.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/consent-form-training-dataset.zip
--------------------------------------------------------------------------------
/curl/form-recognizer/contoso-allinone.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/contoso-allinone.jpg
--------------------------------------------------------------------------------
/curl/form-recognizer/contoso-receipt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/contoso-receipt.png
--------------------------------------------------------------------------------
/curl/form-recognizer/cosent-form-test-document.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/cosent-form-test-document.pdf
--------------------------------------------------------------------------------
/curl/form-recognizer/covid-informed-consent-for-inactivated-immunization-universal-2020-v.3.-covid-screening-vaccine-questions.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/covid-informed-consent-for-inactivated-immunization-universal-2020-v.3.-covid-screening-vaccine-questions.pdf
--------------------------------------------------------------------------------
/curl/form-recognizer/custom-vaccine/after-uploading-files.png:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/curl/form-recognizer/invoice-logic-apps-tutorial.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/invoice-logic-apps-tutorial.pdf
--------------------------------------------------------------------------------
/curl/form-recognizer/invoice_sample.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/invoice_sample.jpg
--------------------------------------------------------------------------------
/curl/form-recognizer/layout-page-001.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/layout-page-001.jpg
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/business_card.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/business_card.jpg
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/identity_documents.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/identity_documents.png
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/insurance-card.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/insurance-card.png
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/invoice.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/invoice.pdf
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/layout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/layout.png
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/read.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/read.png
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/receipt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/receipt.png
--------------------------------------------------------------------------------
/curl/form-recognizer/rest-api/w2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/rest-api/w2.png
--------------------------------------------------------------------------------
/curl/form-recognizer/restructure-power-bi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/restructure-power-bi.png
--------------------------------------------------------------------------------
/curl/form-recognizer/restructure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/restructure.png
--------------------------------------------------------------------------------
/curl/form-recognizer/sample-invoice.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/sample-invoice.pdf
--------------------------------------------------------------------------------
/curl/form-recognizer/sample-layout.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/sample-layout.pdf
--------------------------------------------------------------------------------
/curl/form-recognizer/sample_data.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/sample_data.zip
--------------------------------------------------------------------------------
/curl/form-recognizer/sample_data_with_labels.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/sample_data_with_labels.zip
--------------------------------------------------------------------------------
/curl/form-recognizer/sample_data_without_labels.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/sample_data_without_labels.zip
--------------------------------------------------------------------------------
/curl/form-recognizer/simple-invoice.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/simple-invoice.png
--------------------------------------------------------------------------------
/curl/form-recognizer/w8-training-data.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/w8-training-data.zip
--------------------------------------------------------------------------------
/curl/form-recognizer/w9-training-data.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/curl/form-recognizer/w9-training-data.zip
--------------------------------------------------------------------------------
/dotnet/Language/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - C# (dotnet)
5 | products:
6 | - azure
7 | - cognitive services
8 | ---
9 |
10 | # Language REST API samples
11 |
12 | These samples are quickstarts that show how to use various Language APIs, such as Bing Spell Check and Text Analytics.
13 |
14 | ## Prerequisites
15 |
16 | - Create an [Azure resource](https://portal.azure.com) for the service you'd like to try, for example a Bing Spell Check resource.
17 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
18 |
19 | ## Running the samples
20 | - Copy/paste the sample into a new C# console app (netcore) in Visual Studio, into the Program.cs file.
21 | - Build/run in Visual Studio.
22 |
23 | ## Resources
24 | #### Bing Spell Check: [Documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-spell-check/overview), [API](https://dev.cognitive.microsoft.com/docs/services/5f7d486e04d2430193e1ca8f760cd7ed/operations/57855119bca1df1c647bc358)
25 |
26 | #### Text Analytics: [Documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/index), [API](https://westus.dev.cognitive.microsoft.com/docs/services/TextAnalytics-v3-0-Preview-1/operations/Languages)
27 |
--------------------------------------------------------------------------------
/dotnet/Search/BingEntitySearchv7.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System;
3 | using System.Net.Http;
4 |
5 | /* This sample makes a call to the Bing Entity Search v7 API with a query and returns details about it.
6 | * Bing Entity Search API:
7 | * https://westus2.dev.cognitive.microsoft.com/docs/services/7a3fb374be374859a823b79fd938cc65/operations/52069701a465405ab3286f82
8 | */
9 |
10 | namespace BingEntitySearch
11 | {
12 | class Program
13 | {
14 | // Add your key and endpoint to your environment variables.
15 | static string key = Environment.GetEnvironmentVariable("BING_ENTITY_SEARCH_SUBSCRIPTION_KEY");
16 | static string endpoint = Environment.GetEnvironmentVariable("BING_ENTITY_SEARCH_ENDPOINT");
17 | static string path = "/bing/v7.0/entities/";
18 |
19 | static string market = "en-US";
20 |
21 | static string query = "italian restaurant near me";
22 |
23 | async static void Search()
24 | {
25 | HttpClient client = new HttpClient();
26 | client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", key);
27 |
28 | string uri = endpoint + path + "?mkt=" + market + "&q=" + System.Net.WebUtility.UrlEncode(query);
29 |
30 | HttpResponseMessage response = await client.GetAsync(uri);
31 |
32 | string contentString = await response.Content.ReadAsStringAsync();
33 | dynamic parsedJson = JsonConvert.DeserializeObject(contentString);
34 |
35 | Console.WriteLine(parsedJson);
36 | }
37 |
38 | static void Main(string[] args)
39 | {
40 | Search();
41 | Console.ReadLine();
42 | }
43 |
44 | }
45 | }
46 |
47 |
--------------------------------------------------------------------------------
/dotnet/Search/BingImageSearchInsights.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System;
3 | using System.Text;
4 | using System.Net;
5 |
6 | /* This sample makes a call to the Bing Image Search API with a query image and returns visually similar images.
7 | * Details about each similar image, also known as "insights", are returned in the JSON response.
8 | * Documentation: https://docs.microsoft.com/en-us/azure/cognitive-services/bing-web-search/
9 | */
10 |
11 | namespace BingImageSearchInsights
12 | {
13 | class Program
14 | {
15 | // Add your Azure Bing Search v7 subscription key to your environment variables
16 | static string subscriptionKey = Environment.GetEnvironmentVariable("BING_SEARCH_V7_SUBSCRIPTION_KEY");
17 | // Add your Azure Bing Search v7 endpoint to your environment variables
18 | static string endpoint = Environment.GetEnvironmentVariable("BING_SEARCH_V7_ENDPOINT") + "/bing/v7.0/images/details";
19 |
20 | // Place an image (for example a jpg or png) in your bin\Debug\netcoreapp3.0 folder.
21 | const string imageFile = "YOUR-IMAGE.jpg";
22 |
23 | static void Main()
24 | {
25 | Console.OutputEncoding = Encoding.UTF8;
26 |
27 | WebClient client = new WebClient();
28 | client.Headers["Ocp-Apim-Subscription-Key"] = subscriptionKey;
29 | client.Headers["ContentType"] = "multipart/form-data";
30 | // Returns all insights
31 | byte[] response = client.UploadFile(endpoint + "?modules=All", imageFile);
32 | var json = Encoding.Default.GetString(response);
33 |
34 | // Pretty print the result
35 | dynamic parsedJson = JsonConvert.DeserializeObject(json);
36 | Console.WriteLine("\nBing Image Insights JSON Response:\n");
37 | Console.WriteLine(JsonConvert.SerializeObject(parsedJson, Formatting.Indented));
38 |
39 | Console.ReadLine();
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/dotnet/Search/BingImageSearchv7.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | using Newtonsoft.Json;
5 | using System;
6 | using System.Text;
7 | using System.Net;
8 | using System.IO;
9 | using System.Collections.Generic;
10 |
11 | /* This sample makes a call to the Bing Search API with a text query and returns relevant data from the web.
12 | * Documentation: https://docs.microsoft.com/en-us/azure/cognitive-services/bing-web-search/
13 | */
14 |
15 | namespace BingImageSearch
16 | {
17 | class Program
18 | {
19 | // Add your Azure Bing Search V7 subscription key and endpoint to your environment variables.
20 | static string subscriptionKey = Environment.GetEnvironmentVariable("BING_SEARCH_V7_SUBSCRIPTION_KEY");
21 | static string endpoint = Environment.GetEnvironmentVariable("BING_SEARCH_V7_ENDPOINT") + "/bing/v7.0/images/search";
22 |
23 | const string query = "puppies";
24 |
25 | static void Main()
26 | {
27 | Console.OutputEncoding = Encoding.UTF8;
28 | Dictionary relevantHeaders = new Dictionary();
29 |
30 | Console.WriteLine("Searching images for: " + query);
31 |
32 | // Construct the URI of the search request
33 | var uriQuery = endpoint + "?q=" + Uri.EscapeDataString(query);
34 |
35 | // Perform the Web request and get the response
36 | WebRequest request = HttpWebRequest.Create(uriQuery);
37 | request.Headers["Ocp-Apim-Subscription-Key"] = subscriptionKey;
38 | HttpWebResponse response = (HttpWebResponse)request.GetResponseAsync().Result;
39 | string json = new StreamReader(response.GetResponseStream()).ReadToEnd();
40 |
41 | // Extract Bing HTTP headers
42 | foreach (String header in response.Headers)
43 | {
44 | if (header.StartsWith("BingAPIs-") || header.StartsWith("X-MSEdge-"))
45 | relevantHeaders[header] = response.Headers[header];
46 | }
47 |
48 | Console.WriteLine("\nRelevant HTTP Headers:\n");
49 | foreach (var header in relevantHeaders)
50 | Console.WriteLine(header.Key + ": " + header.Value);
51 |
52 | Console.WriteLine("\nJSON Response:\n");
53 | dynamic parsedJson = JsonConvert.DeserializeObject(json);
54 | Console.WriteLine(JsonConvert.SerializeObject(parsedJson, Formatting.Indented));
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/dotnet/Search/BingVideoSearchv7.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | using Newtonsoft.Json;
5 | using System;
6 | using System.Text;
7 | using System.Net;
8 | using System.IO;
9 |
10 | /* This sample makes a call to the Bing Video Search API with a query and returns data about it.
11 | * Bing Video Search API:
12 | * https://dev.cognitive.microsoft.com/docs/services/3960b4bc7b3a4bc5b97c42d78036d234/operations/56b440d2cf5ff8098cef380b
13 | */
14 |
15 | namespace BingVideoSearch
16 | {
17 |
18 | class Program
19 | {
20 | // Add your Azure Bing Search v7 key and endpoint to your environment variables.
21 | static string subscriptionKey = Environment.GetEnvironmentVariable("BING_SEARCH_V7_SUBSCRIPTION_KEY");
22 | static string endpoint = Environment.GetEnvironmentVariable("BING_SEARCH_V7_ENDPOINT") + "/bing/v7.0/videos/search";
23 |
24 | const string query = "kittens";
25 |
26 | static void Main()
27 | {
28 | Console.OutputEncoding = Encoding.UTF8;
29 | Console.WriteLine("Searching videos for: " + query);
30 |
31 | // Construct the URI of the search request
32 | var uriQuery = endpoint + "?q=" + Uri.EscapeDataString(query);
33 |
34 | // Perform the Web request and get the response
35 | WebRequest request = HttpWebRequest.Create(uriQuery);
36 | request.Headers["Ocp-Apim-Subscription-Key"] = subscriptionKey;
37 | HttpWebResponse response = (HttpWebResponse)request.GetResponseAsync().Result;
38 | string json = new StreamReader(response.GetResponseStream()).ReadToEnd();
39 |
40 | Console.WriteLine("\nJSON Response:\n");
41 | dynamic parsedJson = JsonConvert.DeserializeObject(json);
42 | Console.WriteLine(JsonConvert.SerializeObject(parsedJson, Formatting.Indented));
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/AnalyzeImage/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.AnalyzeImage.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/BatchReadFile/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.BatchReadFile.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/DescribeImage/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.DescribeImage.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/DetectObjects/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.DetectObjects.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/GetAreaOfInterest/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.GetAreaOfInterest.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/GetThumbnail/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.GetThumbnail.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/OCR/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.OCR.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/RecognizeDomainSpecificContent/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.RecognizeDomainSpecificContent.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/RecognizeText/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.RecognizeText.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/ComputerVision/TagImage/Microsoft.Azure.CognitiveServices.Samples.ComputerVision.TagImage.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.0;net461;netstandard1.4
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/Face/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - C# (dotnet)
5 | products:
6 | - azure
7 | - cognitive services
8 | ---
9 |
10 | # Face Rest API
11 |
12 | These samples are quickstarts that show how to use the Face REST API.
13 |
14 | ## Prerequisites
15 |
16 | - [An Azure Face resource](https://azure.microsoft.com/en-us/try/cognitive-services/?api=face)
17 | - Download the images from [this repo](https://github.com/Azure-Samples/cognitive-services-sample-data-files/blob/master/ComputerVision/Images/), then create an "Image" folder inside of a Visual Studio console application in your bin\debug\netcoreapp3.0 folder, then add the image to it. Or use an image of your own.
18 | - Add your Face subscription key and endpoint to your environment variables with the suggested variable names in the sample.
19 |
20 | ## Running the samples
21 |
22 | Run the sample in Visual Studio and see the console output.
23 |
24 | ## Resources
25 | - Face documentation:
26 | https://docs.microsoft.com/en-us/azure/cognitive-services/face/index
27 | - Face API:
28 | https://westus.dev.cognitive.microsoft.com/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236
29 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - C# (dotnet)
5 | products:
6 | - azure
7 | - cognitive services
8 | ---
9 |
10 | # Ink Recognizer REST API samples
11 |
12 | These samples are quickstarts that show how to use the Ink Recognizer API.
13 |
14 | ## Prerequisites
15 |
16 | - An [Azure Ink Recognizer resource](https://portal.azure.com/#blade/Microsoft_Azure_Marketplace/MarketplaceOffersBlade/selectedMenuItemId/home/searchQuery/ink%20recognizer)
17 | - For the sample in the Quickstart folder: create a Visual Studio C# console app and copy/paste the `recognizeInk.cs` code into your `Program.cs` file.
18 | - Add the `example-ink-strokes.json` file to your bin\debug\netcoreapp3.0 folder.
19 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
20 |
21 | ## Running the samples
22 | - If trying the UWP or WPF app (instructions in their READMEs), clone this repo and run the projects from there.
23 | `git clone https://github.com/Azure-Samples/cognitive-services-REST-api-samples.git`
24 | - Otherwise run the Visual Studio console app for the `recognizeInk.cs` results.
25 |
26 | ## Resources
27 | - Ink Recognizer documentation:
28 | https://docs.microsoft.com/en-us/azure/cognitive-services/ink-recognizer/index
29 | - Ink Recognizer API:
30 | https://dev.cognitive.microsoft.com/docs/services/inkrecognizer/operations/inkRecognizerPUT
31 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [project-title] Changelog
2 |
3 |
4 | # x.y.z (yyyy-mm-dd)
5 |
6 | *Features*
7 | * ...
8 |
9 | *Bug Fixes*
10 | * ...
11 |
12 | *Breaking Changes*
13 | * ...
14 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation. All rights reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/App.xaml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-100.png
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-125.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-125.png
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-150.png
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-200.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-200.png
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/dotnet/Vision/InkRecognition/uwp-app/cs/Assets/SplashScreen.scale-400.png
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/HTTP/HttpManager.cs:
--------------------------------------------------------------------------------
1 | using Contoso.NoteTaker.JSON;
2 | using System;
3 | using System.Net;
4 | using System.Net.Http;
5 | using System.Text;
6 | using System.Threading.Tasks;
7 |
8 | namespace Contoso.NoteTaker.Http
9 | {
10 | public class HttpManager
11 | {
12 | string destinationUrl;
13 | HttpClient httpClient;
14 |
15 | public HttpManager(string appKey, string baseAddress, string destinationUrl)
16 | {
17 | httpClient = new HttpClient() { BaseAddress = new Uri(baseAddress)};
18 | this.destinationUrl = destinationUrl;
19 |
20 | httpClient.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", appKey);
21 | }
22 |
23 | public async Task PutAsync(string jsonRequest)
24 | {
25 | try
26 | {
27 | var httpContent = new StringContent(jsonRequest, Encoding.UTF8, "application/json");
28 | var httpResponse = await httpClient.PutAsync(destinationUrl, httpContent);
29 |
30 | // Throw exception for malformed/unauthorized http requests
31 | if (httpResponse.StatusCode == HttpStatusCode.BadRequest || httpResponse.StatusCode == HttpStatusCode.Unauthorized)
32 | {
33 | var errorJson = await httpResponse.Content.ReadAsStringAsync();
34 | var errDetail = JSONProcessor.ParseInkRecognitionError(errorJson);
35 | throw new HttpRequestException(errDetail.ToString());
36 | }
37 | return httpResponse;
38 | }
39 | catch(Exception e)
40 | {
41 | throw e;
42 | }
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/Alternates.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class Alternates
6 | {
7 | [JsonProperty(PropertyName = "category")]
8 | public RecognitionUnitKind Kind { get; set; }
9 |
10 | [JsonProperty(PropertyName ="center")]
11 | public PointDetailsPattern Center { get; set; }
12 |
13 | [JsonProperty(PropertyName ="points")]
14 | public PointDetailsPattern Points { get; set; }
15 |
16 | [JsonProperty(PropertyName ="rotationAngle")]
17 | public PointDetailsPattern RotationAngle { get; set; }
18 |
19 | [JsonProperty(PropertyName = "confidence")]
20 | public float Confidence { get; set; }
21 |
22 | [JsonProperty(PropertyName = "recognizedString")]
23 | public string RecognizedText { get; set; }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/HttpErrorDetails.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class HttpErrorDetails
6 | {
7 | [JsonProperty(PropertyName = "statusCode")]
8 | public string ErrorCode { get; set; }
9 |
10 | [JsonProperty(PropertyName = "message")]
11 | public string Message { get; set; }
12 |
13 | [JsonProperty(PropertyName = "target")]
14 | public string Target { get; set; }
15 |
16 | [JsonProperty(PropertyName = "details")]
17 | public HttpErrorDetails[] Details { get; set; }
18 |
19 | public override string ToString()
20 | {
21 | string msg = "";
22 | msg += (ErrorCode != null) ? " Http Error code : " + ErrorCode : "";
23 | msg += (Target != null) ? " Target : " + Target : "";
24 | msg += (Message != null) ? " Message : " + Message : "";
25 |
26 | if (Details != null)
27 | {
28 | msg += "\n Error Details : ";
29 | foreach (var errDetail in Details)
30 | {
31 | msg += "\n" + errDetail.ToString();
32 | }
33 | }
34 | return msg;
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkBullet.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class InkBullet : InkRecognitionUnit
6 | {
7 | [JsonProperty(PropertyName = "recognizedText")]
8 | public string RecognizedText { get; set; }
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkLine.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System.Collections.Generic;
3 |
4 | namespace Contoso.NoteTaker.JSON.Format
5 | {
6 | public class InkLine : InkRecognitionUnit
7 | {
8 | [JsonProperty(PropertyName = "recognizedText")]
9 | public string RecognizedText { get; set; }
10 |
11 | [JsonProperty(PropertyName = "alternates")]
12 | public List Alternates { get; set; }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkListItem.cs:
--------------------------------------------------------------------------------
1 | namespace Contoso.NoteTaker.JSON.Format
2 | {
3 | public class InkListItem : InkRecognitionUnit
4 | {
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkParagraph.cs:
--------------------------------------------------------------------------------
1 | namespace Contoso.NoteTaker.JSON.Format
2 | {
3 | public class InkParagraph : InkRecognitionUnit
4 | {
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkRecognitionRequest.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using Newtonsoft.Json.Converters;
3 | using System.Collections.Generic;
4 | using System.Runtime.Serialization;
5 |
6 | namespace Contoso.NoteTaker.JSON.Format
7 | {
8 | class InkRecognitionRequest
9 | {
10 | [JsonProperty(PropertyName = "applicationType"), JsonConverter(typeof(StringEnumConverter))]
11 | public InkContentType ApplicationType { get; set; } = InkContentType.Mixed;
12 |
13 | [JsonProperty(PropertyName = "unit"), JsonConverter(typeof(StringEnumConverter))]
14 | public InkPointUnitType Unit { get; set; } = InkPointUnitType.Millimeter;
15 |
16 | [JsonProperty(PropertyName = "language")]
17 | public string Language { get; set; } = "en-US";
18 |
19 | [JsonProperty(PropertyName = "strokes")]
20 | public IReadOnlyList Strokes {get; set;}
21 |
22 | public InkRecognitionRequest(IReadOnlyList strokes)
23 | {
24 | this.Strokes = strokes;
25 | }
26 | }
27 |
28 | public enum InkContentType
29 | {
30 | [EnumMember(Value = "drawing")]
31 | Drawing,
32 | [EnumMember(Value = "writing")]
33 | Writing,
34 | [EnumMember(Value = "mixed")]
35 | Mixed
36 | }
37 |
38 | public enum InkPointUnitType
39 | {
40 | [EnumMember(Value = "mm")]
41 | Millimeter,
42 | [EnumMember(Value = "cm")]
43 | Centimeter,
44 | [EnumMember(Value = "in")]
45 | Inch
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkRecognitionResponse.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System.Collections.Generic;
3 |
4 | namespace Contoso.NoteTaker.JSON.Format
5 | {
6 | public class InkRecognitionResponse
7 | {
8 | [JsonProperty(PropertyName = "recognitionUnits")]
9 | public List RecognitionUnits { get; set; }
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkRecognitionUnit.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System;
3 | using System.Collections.Generic;
4 | using System.Runtime.Serialization;
5 |
6 | namespace Contoso.NoteTaker.JSON.Format
7 | {
8 | abstract public class InkRecognitionUnit
9 | {
10 | [JsonProperty(PropertyName = "id")]
11 | public UInt64 Id { get; set; }
12 |
13 | [JsonProperty(PropertyName = "category")]
14 | public RecognitionUnitKind Kind { get; set; }
15 |
16 | [JsonProperty(PropertyName = "childIds")]
17 | public List ChildIds { get; set; }
18 |
19 | [JsonProperty(PropertyName = "class")]
20 | public RecognitionUnitType Type { get; set; }
21 |
22 | [JsonProperty(PropertyName = "parentId")]
23 | public UInt64 ParentId { get; set; }
24 |
25 | [JsonProperty(PropertyName = "boundingRectangle")]
26 | public Rectangle BoundingRect { get; set; }
27 |
28 | [JsonProperty(PropertyName = "rotatedBoundingRectangle")]
29 | public List RotatedBoundingRect { get; set; }
30 |
31 | [JsonProperty(PropertyName = "strokeIds")]
32 | public List StrokeIds { get; set; }
33 | }
34 |
35 | public enum RecognitionUnitType
36 | {
37 | [EnumMember(Value = "leaf")]
38 | Leaf,
39 | [EnumMember(Value = "container")]
40 | Container
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkWord.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System.Collections.Generic;
3 |
4 | namespace Contoso.NoteTaker.JSON.Format
5 | {
6 | public class InkWord : InkRecognitionUnit
7 | {
8 | [JsonProperty(PropertyName = "recognizedText")]
9 | public string RecognizedText { get; set; }
10 |
11 | [JsonProperty(PropertyName = "alternates")]
12 | public List Alternates { get; set; }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/InkWritingRegion.cs:
--------------------------------------------------------------------------------
1 | namespace Contoso.NoteTaker.JSON.Format
2 | {
3 | public class InkWritingRegion : InkRecognitionUnit
4 | {
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/PointDetailsPattern.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class PointDetailsPattern
6 | {
7 | [JsonProperty(PropertyName = "x")]
8 | public float X { get; set; }
9 |
10 | [JsonProperty(PropertyName = "y")]
11 | public float Y { get; set; }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/RecognitionUnitKind.cs:
--------------------------------------------------------------------------------
1 | using System.Runtime.Serialization;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public enum RecognitionUnitKind
6 | {
7 | [EnumMember(Value = "writingRegion")]
8 | WritingRegion,
9 |
10 | [EnumMember(Value = "paragraph")]
11 | Paragraph,
12 |
13 | [EnumMember(Value = "line")]
14 | Line,
15 |
16 | [EnumMember(Value = "inkWord")]
17 | InkWord,
18 |
19 | [EnumMember(Value = "inkDrawing")]
20 | InkDrawing,
21 |
22 | [EnumMember(Value = "listItem")]
23 | ListItem,
24 |
25 | [EnumMember(Value = "inkBullet")]
26 | InkBullet,
27 |
28 | [EnumMember(Value = "unknown")]
29 | Unknown
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/Format/Rectangle.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class Rectangle
6 | {
7 | [JsonProperty(PropertyName = "topX")]
8 | public float TopX { get; set; }
9 |
10 | [JsonProperty(PropertyName = "topY")]
11 | public float TopY { get; set; }
12 |
13 | [JsonProperty(PropertyName = "width")]
14 | public float Width { get; set; }
15 |
16 | [JsonProperty(PropertyName = "height")]
17 | public float Height { get; set; }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/JSON/JSONProcessor.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using Contoso.NoteTaker.JSON.Converter;
3 | using Contoso.NoteTaker.JSON.Format;
4 | using Contoso.NoteTaker.Services.Ink;
5 | using System;
6 | using System.Collections.Generic;
7 |
8 | namespace Contoso.NoteTaker.JSON
9 | {
10 | class JSONProcessor
11 | {
12 | public static string CreateInkRecognitionRequest(IReadOnlyList strokes)
13 | {
14 | try
15 | {
16 | InkRecognitionRequest request = new InkRecognitionRequest(strokes);
17 | var requestJson = JsonConvert.SerializeObject(request);
18 | return requestJson;
19 | }
20 | catch(Exception e)
21 | {
22 | throw new JsonReaderException(e.Message);
23 | }
24 | }
25 |
26 | public static InkRecognitionRoot ParseInkRecognizerResponse(string responseJson)
27 | {
28 | try
29 | {
30 | var responseObj = JsonConvert.DeserializeObject(responseJson,
31 | new InkRecognitionResponseConverter());
32 | var result = new InkRecognitionRoot(responseObj);
33 | return result;
34 | }
35 | catch(Exception e)
36 | {
37 | throw new JsonWriterException(e.Message);
38 | }
39 | }
40 |
41 | public static HttpErrorDetails ParseInkRecognitionError(string errorJson)
42 | {
43 | try
44 | {
45 | var error = JsonConvert.DeserializeObject(errorJson);
46 | return error;
47 | }
48 | catch(Exception e)
49 | {
50 | throw new JsonReaderException(e.Message);
51 | }
52 |
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/NoteTaker.xaml:
--------------------------------------------------------------------------------
1 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Package.appxmanifest:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | NoteTaker
7 | Contoso
8 | Assets\StoreLogo.png
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.CompilerServices;
3 | using System.Runtime.InteropServices;
4 |
5 | // General Information about an assembly is controlled through the following
6 | // set of attributes. Change these attribute values to modify the information
7 | // associated with an assembly.
8 | [assembly: AssemblyTitle("NoteTaker")]
9 | [assembly: AssemblyDescription("")]
10 | [assembly: AssemblyConfiguration("")]
11 | [assembly: AssemblyCompany("")]
12 | [assembly: AssemblyProduct("NoteTaker")]
13 | [assembly: AssemblyCopyright("Copyright © 2019")]
14 | [assembly: AssemblyTrademark("")]
15 | [assembly: AssemblyCulture("")]
16 |
17 | // Version information for an assembly consists of the following four values:
18 | //
19 | // Major Version
20 | // Minor Version
21 | // Build Number
22 | // Revision
23 | //
24 | // You can specify all the values or you can default the Build and Revision Numbers
25 | // by using the '*' as shown below:
26 | // [assembly: AssemblyVersion("1.0.*")]
27 | [assembly: AssemblyVersion("1.0.0.0")]
28 | [assembly: AssemblyFileVersion("1.0.0.0")]
29 | [assembly: ComVisible(false)]
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/uwp-app/cs/Properties/Default.rd.xml:
--------------------------------------------------------------------------------
1 |
17 |
18 |
19 |
20 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [project-title] Changelog
2 |
3 |
4 | # x.y.z (yyyy-mm-dd)
5 |
6 | *Features*
7 | * ...
8 |
9 | *Bug Fixes*
10 | * ...
11 |
12 | *Breaking Changes*
13 | * ...
14 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/App.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/App.xaml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/App.xaml.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Configuration;
4 | using System.Data;
5 | using System.Linq;
6 | using System.Threading.Tasks;
7 | using System.Windows;
8 |
9 | namespace NoteTaker
10 | {
11 | ///
12 | /// Interaction logic for App.xaml
13 | ///
14 | public partial class App : Application
15 | {
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/HTTP/HttpManager.cs:
--------------------------------------------------------------------------------
1 | using Contoso.NoteTaker.JSON;
2 | using System;
3 | using System.Net;
4 | using System.Net.Http;
5 | using System.Text;
6 | using System.Threading.Tasks;
7 |
8 | namespace Contoso.NoteTaker.Http
9 | {
10 | public class HttpManager
11 | {
12 | string destinationUrl;
13 | HttpClient httpClient;
14 |
15 | public HttpManager(string appKey, string baseAddress, string destinationUrl)
16 | {
17 | httpClient = new HttpClient() { BaseAddress = new Uri(baseAddress)};
18 | this.destinationUrl = destinationUrl;
19 |
20 | httpClient.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", appKey);
21 | }
22 |
23 | public async Task PutAsync(string jsonRequest)
24 | {
25 | try
26 | {
27 | var httpContent = new StringContent(jsonRequest, Encoding.UTF8, "application/json");
28 | var httpResponse = await httpClient.PutAsync(destinationUrl, httpContent);
29 |
30 | // Throw exception for malformed/unauthorized http requests
31 | if (httpResponse.StatusCode == HttpStatusCode.BadRequest || httpResponse.StatusCode == HttpStatusCode.Unauthorized)
32 | {
33 | var errorJson = await httpResponse.Content.ReadAsStringAsync();
34 | var errDetail = JSONProcessor.ParseInkRecognitionError(errorJson);
35 | throw new HttpRequestException(errDetail.ToString());
36 | }
37 | return httpResponse;
38 | }
39 | catch(Exception e)
40 | {
41 | throw e;
42 | }
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/Alternates.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class Alternates
6 | {
7 | [JsonProperty(PropertyName = "category")]
8 | public RecognitionUnitKind Kind { get; set; }
9 |
10 | [JsonProperty(PropertyName ="center")]
11 | public PointDetailsPattern Center { get; set; }
12 |
13 | [JsonProperty(PropertyName ="points")]
14 | public PointDetailsPattern Points { get; set; }
15 |
16 | [JsonProperty(PropertyName ="rotationAngle")]
17 | public PointDetailsPattern RotationAngle { get; set; }
18 |
19 | [JsonProperty(PropertyName = "confidence")]
20 | public float Confidence { get; set; }
21 |
22 | [JsonProperty(PropertyName = "recognizedString")]
23 | public string RecognizedText { get; set; }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/HttpErrorDetails.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class HttpErrorDetails
6 | {
7 | [JsonProperty(PropertyName = "statusCode")]
8 | public string ErrorCode { get; set; }
9 |
10 | [JsonProperty(PropertyName = "message")]
11 | public string Message { get; set; }
12 |
13 | [JsonProperty(PropertyName = "target")]
14 | public string Target { get; set; }
15 |
16 | [JsonProperty(PropertyName = "details")]
17 | public HttpErrorDetails[] Details { get; set; }
18 |
19 | public override string ToString()
20 | {
21 | string msg = "";
22 | msg += (ErrorCode != null) ? " Http Error code : " + ErrorCode : "";
23 | msg += (Target != null) ? " Target : " + Target : "";
24 | msg += (Message != null) ? " Message : " + Message : "";
25 |
26 | if (Details != null)
27 | {
28 | msg += "\n Error Details : ";
29 | foreach (var errDetail in Details)
30 | {
31 | msg += "\n" + errDetail.ToString();
32 | }
33 | }
34 | return msg;
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkBullet.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class InkBullet : InkRecognitionUnit
6 | {
7 | [JsonProperty(PropertyName = "recognizedText")]
8 | public string RecognizedText { get; set; }
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkLine.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System.Collections.Generic;
3 |
4 | namespace Contoso.NoteTaker.JSON.Format
5 | {
6 | public class InkLine : InkRecognitionUnit
7 | {
8 | [JsonProperty(PropertyName = "recognizedText")]
9 | public string RecognizedText { get; set; }
10 |
11 | [JsonProperty(PropertyName = "alternates")]
12 | public List Alternates { get; set; }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkListItem.cs:
--------------------------------------------------------------------------------
1 | namespace Contoso.NoteTaker.JSON.Format
2 | {
3 | public class InkListItem : InkRecognitionUnit
4 | {
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkParagraph.cs:
--------------------------------------------------------------------------------
1 | namespace Contoso.NoteTaker.JSON.Format
2 | {
3 | public class InkParagraph : InkRecognitionUnit
4 | {
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkRecognitionRequest.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using Newtonsoft.Json.Converters;
3 | using System.Collections.Generic;
4 | using System.Runtime.Serialization;
5 |
6 | namespace Contoso.NoteTaker.JSON.Format
7 | {
8 | class InkRecognitionRequest
9 | {
10 | [JsonProperty(PropertyName = "applicationType"), JsonConverter(typeof(StringEnumConverter))]
11 | public InkContentType ApplicationType { get; set; } = InkContentType.Mixed;
12 |
13 | [JsonProperty(PropertyName = "unit"), JsonConverter(typeof(StringEnumConverter))]
14 | public InkPointUnitType Unit { get; set; } = InkPointUnitType.Millimeter;
15 |
16 | [JsonProperty(PropertyName = "language")]
17 | public string Language { get; set; } = "en-US";
18 |
19 | [JsonProperty(PropertyName = "strokes")]
20 | public IReadOnlyList Strokes {get; set;}
21 |
22 | public InkRecognitionRequest(IReadOnlyList strokes)
23 | {
24 | this.Strokes = strokes;
25 | }
26 | }
27 |
28 | public enum InkContentType
29 | {
30 | [EnumMember(Value = "drawing")]
31 | Drawing,
32 | [EnumMember(Value = "writing")]
33 | Writing,
34 | [EnumMember(Value = "mixed")]
35 | Mixed
36 | }
37 |
38 | public enum InkPointUnitType
39 | {
40 | [EnumMember(Value = "mm")]
41 | Millimeter,
42 | [EnumMember(Value = "cm")]
43 | Centimeter,
44 | [EnumMember(Value = "in")]
45 | Inch
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkRecognitionResponse.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System.Collections.Generic;
3 |
4 | namespace Contoso.NoteTaker.JSON.Format
5 | {
6 | public class InkRecognitionResponse
7 | {
8 | [JsonProperty(PropertyName = "recognitionUnits")]
9 | public List RecognitionUnits { get; set; }
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkRecognitionUnit.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System;
3 | using System.Collections.Generic;
4 | using System.Runtime.Serialization;
5 |
6 | namespace Contoso.NoteTaker.JSON.Format
7 | {
8 | abstract public class InkRecognitionUnit
9 | {
10 | [JsonProperty(PropertyName = "id")]
11 | public UInt64 Id { get; set; }
12 |
13 | [JsonProperty(PropertyName = "category")]
14 | public RecognitionUnitKind Kind { get; set; }
15 |
16 | [JsonProperty(PropertyName = "childIds")]
17 | public List ChildIds { get; set; }
18 |
19 | [JsonProperty(PropertyName = "class")]
20 | public RecognitionUnitType Type { get; set; }
21 |
22 | [JsonProperty(PropertyName = "parentId")]
23 | public UInt64 ParentId { get; set; }
24 |
25 | [JsonProperty(PropertyName = "boundingRectangle")]
26 | public Rectangle BoundingRect { get; set; }
27 |
28 | [JsonProperty(PropertyName = "rotatedBoundingRectangle")]
29 | public List RotatedBoundingRect { get; set; }
30 |
31 | [JsonProperty(PropertyName = "strokeIds")]
32 | public List StrokeIds { get; set; }
33 | }
34 |
35 | public enum RecognitionUnitType
36 | {
37 | [EnumMember(Value = "leaf")]
38 | Leaf,
39 | [EnumMember(Value = "container")]
40 | Container
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkWord.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using System.Collections.Generic;
3 |
4 | namespace Contoso.NoteTaker.JSON.Format
5 | {
6 | public class InkWord : InkRecognitionUnit
7 | {
8 | [JsonProperty(PropertyName = "recognizedText")]
9 | public string RecognizedText { get; set; }
10 |
11 | [JsonProperty(PropertyName = "alternates")]
12 | public List Alternates { get; set; }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/InkWritingRegion.cs:
--------------------------------------------------------------------------------
1 | namespace Contoso.NoteTaker.JSON.Format
2 | {
3 | public class InkWritingRegion : InkRecognitionUnit
4 | {
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/PointDetailsPattern.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class PointDetailsPattern
6 | {
7 | [JsonProperty(PropertyName = "x")]
8 | public float X { get; set; }
9 |
10 | [JsonProperty(PropertyName = "y")]
11 | public float Y { get; set; }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/RecognitionUnitKind.cs:
--------------------------------------------------------------------------------
1 | using System.Runtime.Serialization;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public enum RecognitionUnitKind
6 | {
7 | [EnumMember(Value = "writingRegion")]
8 | WritingRegion,
9 |
10 | [EnumMember(Value = "paragraph")]
11 | Paragraph,
12 |
13 | [EnumMember(Value = "line")]
14 | Line,
15 |
16 | [EnumMember(Value = "inkWord")]
17 | InkWord,
18 |
19 | [EnumMember(Value = "inkDrawing")]
20 | InkDrawing,
21 |
22 | [EnumMember(Value = "listItem")]
23 | ListItem,
24 |
25 | [EnumMember(Value = "inkBullet")]
26 | InkBullet,
27 |
28 | [EnumMember(Value = "unknown")]
29 | Unknown
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/Format/Rectangle.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 |
3 | namespace Contoso.NoteTaker.JSON.Format
4 | {
5 | public class Rectangle
6 | {
7 | [JsonProperty(PropertyName = "topX")]
8 | public float TopX { get; set; }
9 |
10 | [JsonProperty(PropertyName = "topY")]
11 | public float TopY { get; set; }
12 |
13 | [JsonProperty(PropertyName = "width")]
14 | public float Width { get; set; }
15 |
16 | [JsonProperty(PropertyName = "height")]
17 | public float Height { get; set; }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/JSON/JSONProcessor.cs:
--------------------------------------------------------------------------------
1 | using Newtonsoft.Json;
2 | using Contoso.NoteTaker.JSON.Converter;
3 | using Contoso.NoteTaker.JSON.Format;
4 | using Contoso.NoteTaker.Services.Ink;
5 | using System;
6 | using System.Collections.Generic;
7 |
8 | namespace Contoso.NoteTaker.JSON
9 | {
10 | class JSONProcessor
11 | {
12 | public static string CreateInkRecognitionRequest(IReadOnlyList strokes)
13 | {
14 | try
15 | {
16 | InkRecognitionRequest request = new InkRecognitionRequest(strokes);
17 | var requestJson = JsonConvert.SerializeObject(request);
18 | return requestJson;
19 | }
20 | catch(Exception e)
21 | {
22 | throw new JsonReaderException(e.Message);
23 | }
24 | }
25 |
26 | public static InkRecognitionRoot ParseInkRecognizerResponse(string responseJson)
27 | {
28 | try
29 | {
30 | var responseObj = JsonConvert.DeserializeObject(responseJson,
31 | new InkRecognitionResponseConverter());
32 | var result = new InkRecognitionRoot(responseObj);
33 | return result;
34 | }
35 | catch(Exception e)
36 | {
37 | throw new JsonWriterException(e.Message);
38 | }
39 | }
40 |
41 | public static HttpErrorDetails ParseInkRecognitionError(string errorJson)
42 | {
43 | try
44 | {
45 | var error = JsonConvert.DeserializeObject(errorJson);
46 | return error;
47 | }
48 | catch(Exception e)
49 | {
50 | throw new JsonReaderException(e.Message);
51 | }
52 |
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/MainWindow.xaml:
--------------------------------------------------------------------------------
1 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/NoteTaker.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio 15
4 | VisualStudioVersion = 15.0.28307.489
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "NoteTaker", "NoteTaker.csproj", "{DB1BC364-5104-4E12-8B72-4E67023DF722}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|Any CPU = Debug|Any CPU
11 | Release|Any CPU = Release|Any CPU
12 | EndGlobalSection
13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
14 | {DB1BC364-5104-4E12-8B72-4E67023DF722}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
15 | {DB1BC364-5104-4E12-8B72-4E67023DF722}.Debug|Any CPU.Build.0 = Debug|Any CPU
16 | {DB1BC364-5104-4E12-8B72-4E67023DF722}.Release|Any CPU.ActiveCfg = Release|Any CPU
17 | {DB1BC364-5104-4E12-8B72-4E67023DF722}.Release|Any CPU.Build.0 = Release|Any CPU
18 | EndGlobalSection
19 | GlobalSection(SolutionProperties) = preSolution
20 | HideSolutionNode = FALSE
21 | EndGlobalSection
22 | GlobalSection(ExtensibilityGlobals) = postSolution
23 | SolutionGuid = {CDAD12C4-E02E-4710-A03C-F828C390A22E}
24 | EndGlobalSection
25 | EndGlobal
26 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/Properties/Settings.Designer.cs:
--------------------------------------------------------------------------------
1 | //------------------------------------------------------------------------------
2 | //
3 | // This code was generated by a tool.
4 | // Runtime Version:4.0.30319.42000
5 | //
6 | // Changes to this file may cause incorrect behavior and will be lost if
7 | // the code is regenerated.
8 | //
9 | //------------------------------------------------------------------------------
10 |
11 | namespace NoteTaker.Properties {
12 |
13 |
14 | [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
15 | [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "15.9.0.0")]
    /// <summary>
    /// Application settings singleton. Tool-generated: do not edit by hand —
    /// changes are lost when the settings designer regenerates this file.
    /// </summary>
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
        
        // Single synchronized (thread-safe wrapped) instance shared app-wide.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
        
        // Accessor for the shared settings instance.
        public static Settings Default {
            get {
                return defaultInstance;
            }
        }
    }
26 | }
27 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/Properties/Settings.settings:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/app.manifest:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/dotnet/Vision/InkRecognition/wpf-app/src/packages.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/java/InkRecognition/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - java
5 | products:
6 | - azure
7 | - cognitive services
8 | ---
9 |
10 | # Ink Recognizer REST API samples
11 |
12 | These samples are quickstarts that show how to use the Ink Recognizer API.
13 |
14 | ## Prerequisites
15 |
16 | - An [Azure Ink Recognizer resource](https://portal.azure.com/#blade/Microsoft_Azure_Marketplace/MarketplaceOffersBlade/selectedMenuItemId/home/searchQuery/ink%20recognizer)
17 | - For the sample in the quickstart folder:
18 | * copy/paste the `.java` file into your IDE or text editor,
19 | * add the `example-ink-strokes.json` file as a resource to your project or working directory,
20 | * create a lib folder in your working directory (or IDE project) and add the Java jar libraries needed:
21 | httpclient-4.5.11+
22 | slf4j-jdk14-1.7.28+
23 | httpcore-4.4.13+
24 | commons-logging-1.2+
25 | jackson-databind-2.10.2+
26 | jackson-annotations-2.10.2+
27 | jackson-core-2.10.2+
28 | - Add your key and endpoint from your Azure resource to your environment variables with the variable names suggested in the sample.
29 |
30 | ## Running the samples
31 | - If trying the Android app, open the Recognizer project in Android Studio, add environment variables for your key and endpoint, and run.
32 | - For the quickstart, run from your IDE or using the below commands from the command line:
33 | `javac RecognizeInk.java -cp .;lib\* -encoding UTF-8`
34 | `java -cp .;lib\* RecognizeInk`
35 |
36 | ## Resources
37 | - Ink Recognizer documentation:
38 | https://docs.microsoft.com/en-us/azure/cognitive-services/ink-recognizer/index
39 | - Ink Recognizer API:
40 | https://dev.cognitive.microsoft.com/docs/services/inkrecognizer/operations/inkRecognizerPUT
41 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | /.idea/navEditor.xml
9 | /.idea/assetWizardSettings.xml
10 | .DS_Store
11 | /build
12 | /captures
13 | .externalNativeBuild
14 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 28
5 | defaultConfig {
6 | applicationId "CognitiveServices.Ink.Recognition"
7 | minSdkVersion 15
8 | targetSdkVersion 28
9 | versionCode 1
10 | versionName "1.0"
11 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
12 | }
13 | buildTypes {
14 | release {
15 | minifyEnabled false
16 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
17 | }
18 | }
19 | }
20 |
21 | dependencies {
22 | implementation fileTree(dir: 'libs', include: ['*.jar'])
23 | implementation 'com.android.support:appcompat-v7:28.0.0'
24 | implementation 'com.android.support.constraint:constraint-layout:1.1.3'
25 | implementation 'com.android.support:design:28.0.0'
26 | testImplementation 'junit:junit:4.12'
27 | androidTestImplementation 'com.android.support.test:runner:1.0.2'
28 | androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
29 | }
30 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
15 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkBullet.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.util.DisplayMetrics;
4 | import org.json.JSONException;
5 | import org.json.JSONObject;
6 |
7 | class InkBullet extends InkRecognitionUnit {
8 |
9 | public String getText() {
10 | return text;
11 | }
12 |
13 | private String text;
14 |
15 | InkBullet(String bulletJSON, DisplayMetrics metrics, InkRoot result) throws JSONException {
16 | super(bulletJSON, metrics, result);
17 | JSONObject jsonBullet = new JSONObject(bulletJSON);
18 | this.text = jsonBullet.getString("recognizedText");
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkLine.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.util.DisplayMetrics;
4 |
5 | import org.json.JSONArray;
6 | import org.json.JSONException;
7 | import org.json.JSONObject;
8 | import java.util.ArrayList;
9 |
10 | class InkLine extends InkRecognitionUnit {
11 |
12 | private float indentLevel = 0.0f;
13 | private String text;
14 | private final ArrayList alternates = new ArrayList<>();
15 |
16 | public float getIndentLevel() {
17 | return indentLevel;
18 | }
19 |
20 | public String getText() {
21 | return text;
22 | }
23 |
24 | public ArrayList getAlternates() {
25 | return alternates;
26 | }
27 |
28 | InkLine(String lineJSON, DisplayMetrics metrics, InkRoot result) throws JSONException {
29 | super(lineJSON, metrics, result);
30 | JSONObject jsonLine = new JSONObject(lineJSON);
31 | JSONArray jsonAlternates = jsonLine.getJSONArray("alternates");
32 | if (jsonLine.has("indentLevel")) {
33 | this.indentLevel = jsonLine.getInt("indentLevel");
34 | }
35 | this.text = jsonLine.getString("recognizedText");
36 |
37 | for (int i=0;
38 | i < jsonAlternates.length();
39 | i++) {
40 | alternates.add(jsonAlternates.getJSONObject(i).getString("recognizedString"));
41 |
42 | }
43 |
44 | }
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkListItem.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.util.DisplayMetrics;
4 |
5 | import org.json.JSONException;
6 |
/**
 * Recognition unit for a list item. Adds no state beyond the base
 * InkRecognitionUnit; exists to give list items their own type.
 */
class InkListItem extends InkRecognitionUnit {

    InkListItem(String listJSON, DisplayMetrics metrics, InkRoot result) throws JSONException {
        super(listJSON, metrics,result);
    }
}
13 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkParagraph.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.util.DisplayMetrics;
4 |
5 | import org.json.JSONException;
6 |
/**
 * Recognition unit for a paragraph. Adds no state beyond the base
 * InkRecognitionUnit; exists to give paragraphs their own type.
 */
class InkParagraph extends InkRecognitionUnit {

    InkParagraph(String paragraphJSON, DisplayMetrics metrics, InkRoot result) throws JSONException {
        super(paragraphJSON, metrics, result);
    }
}
13 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkPoint.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
/**
 * Immutable 2-D point of an ink stroke, in the units the enclosing
 * stroke stores its points in.
 */
public class InkPoint {
    final float x;
    final float y;

    InkPoint(float x, float y) {
        this.x = x;
        this.y = y;
    }
}
12 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkRecognitionUnitCategory.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.support.annotation.NonNull;
4 |
/**
 * Categories of recognition units, each paired with the string form
 * this app uses for it (returned by toString()).
 */
public enum InkRecognitionUnitCategory {
    UNKNOWN("Unknown"),
    INK_BULLET("Bullet"),
    INK_LIST_ITEM("ListItem"),
    INK_WORD("Word"),
    INK_DRAWING ("Drawing"),
    INK_PARAGRAPH("Paragraph"),
    INK_LINE ("Line"),
    INK_WRITING_REGION("WritingRegion");

    // String form associated with this category.
    private final String category;
    InkRecognitionUnitCategory(String category) {
        this.category = category;
    }

    @NonNull
    public String toString(){
        return category;
    }
}
25 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkStroke.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.util.DisplayMetrics;
4 | import java.util.ArrayList;
5 |
6 | class InkStroke {
7 | final int strokeId;
8 | final ArrayList inkPoints = new ArrayList<>();
9 | final String language;
10 | final StrokeKind kind;
11 | private final float xdpi;
12 | private final float ydpi;
13 | private static int num = 0;
14 |
15 | @SuppressWarnings("unused")
16 | InkStroke(int id, String language, StrokeKind kind, DisplayMetrics metrics) {
17 | this.xdpi = metrics.xdpi;
18 | this.ydpi = metrics.ydpi;
19 | this.strokeId = id;
20 | this.language = language;
21 | this.kind = kind;
22 | }
23 |
24 | InkStroke(DisplayMetrics metrics) {
25 | this.xdpi = metrics.xdpi;
26 | this.ydpi = metrics.ydpi;
27 | this.strokeId = getNextNum();
28 | this.language = "en-US";
29 | this.kind = StrokeKind.UNKNOWN;
30 | }
31 |
32 | public void addPoint(float x, float y) {
33 | x = x / xdpi * InkRecognitionUnit.INCH_TO_MM;
34 | y = y / ydpi * InkRecognitionUnit.INCH_TO_MM;
35 | InkPoint point = new InkPoint(x, y);
36 | inkPoints.add(point);
37 | }
38 |
39 | private int getNextNum()
40 | {
41 | return ++num;
42 | }
43 | }
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkWord.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.util.DisplayMetrics;
4 |
5 | import org.json.JSONArray;
6 | import org.json.JSONException;
7 | import org.json.JSONObject;
8 |
9 | import java.util.ArrayList;
10 |
11 | class InkWord extends InkRecognitionUnit {
12 |
13 | public ArrayList getAlternates() {
14 | return alternates;
15 | }
16 |
17 | private final ArrayList alternates = new ArrayList<>();
18 |
19 | public String getText() {
20 | return text;
21 | }
22 |
23 | private String text;
24 |
25 | InkWord(String wordJSON, DisplayMetrics metrics, InkRoot result) throws JSONException {
26 | super(wordJSON, metrics, result);
27 | JSONObject jsonWord = new JSONObject(wordJSON);
28 | JSONArray jsonAlternates = jsonWord.getJSONArray("alternates");
29 |
30 | for (int i=0;
31 | i < jsonAlternates.length();
32 | i++) {
33 | alternates.add(jsonAlternates.getJSONObject(i).getString("recognizedString"));
34 | }
35 |
36 | this.text = jsonWord.getString("recognizedText");
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/InkWritingRegion.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.util.DisplayMetrics;
4 |
5 | import org.json.JSONException;
6 |
7 | class InkWritingRegion extends InkRecognitionUnit {
8 | InkWritingRegion(String paragraphJSON, DisplayMetrics metrics, InkRoot result) throws JSONException {
9 | super(paragraphJSON, metrics, result);
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/MainActivity.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | import android.os.Bundle;
4 | import android.support.v7.app.AppCompatActivity;
5 |
6 | public class MainActivity extends AppCompatActivity {
7 |
8 | @Override
9 | protected void onCreate(Bundle savedInstanceState) {
10 | super.onCreate(savedInstanceState);
11 | NoteTaker noteTaker = new NoteTaker(this);
12 | setContentView(noteTaker);
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/RecognitionResultStatus.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
/**
 * Status of an ink recognition attempt.
 * NOTE(review): UNCHANGED vs UPDATED presumably compares against the
 * previous recognition result — confirm against the caller.
 */
public enum RecognitionResultStatus {
    UNCHANGED,
    UPDATED,
    FAILED
}
8 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/Rectangle.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
/**
 * Immutable axis-aligned rectangle described by an origin (x, y) and a size.
 */
public class Rectangle {

    private final float x;
    private final float y;
    private final float width;
    private final float height;

    Rectangle(float x, float y, float width, float height) {
        this.x = x;
        this.y = y;
        this.width = width;
        this.height = height;
    }

    /** X coordinate of the rectangle's origin. */
    public float getX() {
        return x;
    }

    /** Y coordinate of the rectangle's origin. */
    public float getY() {
        return y;
    }

    /** Horizontal extent of the rectangle. */
    public float getWidth() {
        return width;
    }

    /** Vertical extent of the rectangle. */
    public float getHeight() {
        return height;
    }
}
31 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/Shape.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
3 | @SuppressWarnings("SpellCheckingInspection")
/**
 * Shapes the Ink Recognizer service can report for a drawing unit.
 * Constant names intentionally match the service's shape names with the
 * words run together (hence the spell-check suppression on the type).
 */
@SuppressWarnings("SpellCheckingInspection")
public enum Shape {
    DRAWING,
    SQUARE,
    RECTANGLE,
    CIRCLE,
    ELLIPSE,
    TRIANGLE,
    ISOSCELESTRIANGLE,
    EQUILATERALTRIANGLE,
    RIGHTTRIANGLE,
    QUADRILATERAL,
    DIAMOND,
    TRAPEZOID,
    PARALLELOGRAM,
    PENTAGON,
    HEXAGON,
    BLOCKARROW,
    HEART,
    STARSIMPLE,
    STARCROSSED,
    CLOUD,
    LINE,
    CURVE,
    POLYLINE
}
29 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/java/CognitiveServices/Ink/Recognition/StrokeKind.java:
--------------------------------------------------------------------------------
1 | package CognitiveServices.Ink.Recognition;
2 |
/**
 * Hint for what a stroke represents, sent with each stroke in the
 * recognition request.
 */
public enum StrokeKind {
    DRAWING,
    WRITING,
    UNKNOWN,
}
8 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #008577
4 | #00574B
5 | #D81B60
6 |
7 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | Recognizer
3 |
4 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/app/src/main/res/xml/backup_descriptor.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | google()
6 | jcenter()
7 |
8 | }
9 | dependencies {
10 | classpath 'com.android.tools.build:gradle:3.4.0'
11 |
12 | // NOTE: Do not place your application dependencies here; they belong
13 | // in the individual module build.gradle files
14 | }
15 | }
16 |
17 | allprojects {
18 | repositories {
19 | google()
20 | jcenter()
21 |
22 | }
23 | }
24 |
25 | task clean(type: Delete) {
26 | delete rootProject.buildDir
27 | }
28 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 |
15 |
16 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/InkRecognition/android-sample-app/Recognizer/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Apr 29 13:55:11 PDT 2019
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
7 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe: prefer JAVA_HOME when set, otherwise fall back to java.exe on the PATH.
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp all of the command line arguments into CMD_LINE_ARGS.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line; the classpath points at the bundled gradle-wrapper.jar
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle by launching the wrapper main class with any configured JVM options
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/java/InkRecognition/android-sample-app/Recognizer/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------
/java/Language/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - java
5 | products:
6 | - azure
7 | - cognitive services
8 | ---
9 |
10 | # Language REST API samples
11 |
12 | These samples are quickstarts that show how to use various Language APIs, such as Bing Spell Check and Text Analytics.
13 |
14 | ## Prerequisites
15 |
16 | - Create an [Azure resource](https://portal.azure.com) for the service you'd like to try, for example a Bing Spell Check resource.
17 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
18 | - Copy/paste the `.java` file into your project or text editor.
19 | - For Text Analytics an entire Java project is available for download.
20 | - Include the [JSON](https://github.com/stleary/JSON-java) library into a lib folder.
21 |
22 | ## Running the samples
23 | - Run Text Analytics in your IDE
24 | - Build/run Bing Spell Check from the command line with these commands:
25 |
26 | `javac BingSpellCheck.java -cp .;lib\* -encoding UTF-8`
27 | `java -cp .;lib\* BingSpellCheck`
28 |
29 | ## Resources
30 | #### Bing Spell Check: [Documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-spell-check/overview), [API](https://dev.cognitive.microsoft.com/docs/services/5f7d486e04d2430193e1ca8f760cd7ed/operations/57855119bca1df1c647bc358)
31 |
32 | #### Text Analytics: [Documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/index), [API](https://westus.dev.cognitive.microsoft.com/docs/services/TextAnalytics-v3-0-Preview-1/operations/Languages)
33 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/.classpath:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | azure-textanalytics-sample
4 |
5 |
6 |
7 |
8 |
9 | org.eclipse.jdt.core.javabuilder
10 |
11 |
12 |
13 |
14 | org.eclipse.m2e.core.maven2Builder
15 |
16 |
17 |
18 |
19 |
20 | org.eclipse.jdt.core.javanature
21 | org.eclipse.m2e.core.maven2Nature
22 |
23 |
24 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/.settings/org.eclipse.jdt.apt.core.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | org.eclipse.jdt.apt.aptEnabled=false
3 | org.eclipse.jdt.apt.genSrcDir=target\\generated-sources\\annotations
4 | org.eclipse.jdt.apt.genTestSrcDir=target\\generated-test-sources\\test-annotations
5 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/.settings/org.eclipse.jdt.core.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
3 | org.eclipse.jdt.core.compiler.compliance=1.7
4 | org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
5 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
6 | org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
7 | org.eclipse.jdt.core.compiler.processAnnotations=disabled
8 | org.eclipse.jdt.core.compiler.release=disabled
9 | org.eclipse.jdt.core.compiler.source=1.7
10 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/.settings/org.eclipse.m2e.core.prefs:
--------------------------------------------------------------------------------
1 | activeProfiles=
2 | eclipse.preferences.version=1
3 | resolveWorkspaceProjects=true
4 | version=1
5 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "configurations": [
3 | {
4 | "type": "java",
5 | "name": "CodeLens (Launch) - TextAnalytics",
6 | "request": "launch",
7 | "mainClass": "com.microsoft.azure.textanalytics.samples.TextAnalytics",
8 | "projectName": "azure-textanalytics-sample"
9 | }
10 | ]
11 | }
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/README.md:
--------------------------------------------------------------------------------
1 | # Quickstart for Text Analytics API with Java
2 |
3 |
4 | This article shows you how to detect language, analyze sentiment, and extract key phrases using the [Text Analytics APIs](//go.microsoft.com/fwlink/?LinkID=759711) with Java.
5 |
6 | Refer to the [API definitions](//go.microsoft.com/fwlink/?LinkID=759346) for technical documentation for the APIs.
7 |
8 | ## Prerequisites
9 |
10 | You must have a [Cognitive Services API account](https://docs.microsoft.com/azure/cognitive-services/cognitive-services-apis-create-account) with **Text Analytics API**. You can use the **free tier for 5,000 transactions/month** to complete this quickstart.
11 |
12 | You must also have the [endpoint and access key](../How-tos/text-analytics-how-to-access-key.md) that was generated for you during sign up.
13 |
14 |
15 | ### Quickstart
16 |
17 | To get these samples running locally, simply get the pre-requisites above, then:
18 |
19 | 1. git clone https://github.com/Azure-Samples/cognitive-services-java-sdk-samples.git
20 | 2. cd cognitive-services-java-sdk-samples/TextAnalytics
21 | 3. mvn compile
22 | 4. set env variable AZURE_TEXTANALYTICS_API_KEY to your cognitive services API key.
23 | 5. mvn exec:java -Dexec.mainClass="com.microsoft.azure.textanalytics.samples.Samples"
24 |
25 | ## See also
26 |
27 | [Text Analytics overview](../overview.md)
28 | [Frequently asked questions (FAQ)](../text-analytics-resource-faq.md)
29 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 | 4.0.0
5 | com.microsoft.azure
6 | azure-textanalytics-sample
7 | 0.0.1-SNAPSHOT
8 | Simple.java
9 |
10 | https://github.com/Azure/bing-search-java
11 |
12 |
13 |
14 | org.codehaus.mojo
15 | exec-maven-plugin
16 | 1.4.0
17 |
18 | com.microsoft.azure.textanalytics.samples.Samples
19 |
20 |
21 |
22 | maven-compiler-plugin
23 | 3.0
24 |
25 | 1.7
26 | 1.7
27 |
28 |
29 |
30 | maven-assembly-plugin
31 |
32 |
33 | package
34 |
35 | attached
36 |
37 |
38 |
39 | jar-with-dependencies
40 |
41 |
42 |
43 | com.microsoft.azure.textanalytics.samples.Samples
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 | com.google.code.gson
55 | gson
56 | 2.1
57 | provided
58 |
59 |
60 |
61 |
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/Document.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/Document.class
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/Documents.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/Documents.class
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/EntityRecognition.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/EntityRecognition.class
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/KeyPhraseExtraction.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/KeyPhraseExtraction.class
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/LanguageDetection.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/LanguageDetection.class
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/SentimentAnalysis.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/SentimentAnalysis.class
--------------------------------------------------------------------------------
/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/TextAnalytics.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/java/Language/TextAnalytics/target/classes/com/microsoft/azure/textanalytics/samples/TextAnalytics.class
--------------------------------------------------------------------------------
/java/Vision/README.md:
--------------------------------------------------------------------------------
1 |
2 | # Vision REST API samples
3 |
4 | These samples are quickstarts that show how to use various Vision APIs, such as Face.
5 |
6 | ## Prerequisites
7 | - Create an [Azure resource](https://portal.azure.com) for the service you'd like to try, for example a Face resource.
8 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
9 | - Add these jar libraries to your project or create a `lib` folder in your working directory and add them there:
10 | * commons-logging-4.0.6+
11 | * httpclient-4.5.3+
12 | * httpcore-4.4.13+
13 | * json-20190722+
14 |
15 | ## Running the samples
16 | Build/run in your IDE or from the command line, for example:
17 | `javac Main.java -cp .;lib\*`
18 | `java -cp .;lib\* Main`
19 |
20 | ## Resources
21 | - Computer Vision documentation:
22 | https://docs.microsoft.com/en-us/azure/cognitive-services/computer-vision/home
23 | - Computer Vision 2.1 API:
24 | https://westus.dev.cognitive.microsoft.com/docs/services/5cd27ec07268f6c679a3e641/operations/56f91f2e778daf14a499f21b
25 | - Face documentation:
26 | https://docs.microsoft.com/en-us/azure/cognitive-services/face/index
27 | - Face 1.0 API:
28 | https://docs.microsoft.com/en-us/azure/cognitive-services/face/apireference
29 |
--------------------------------------------------------------------------------
/javascript/InkRecognition/javascript-app/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation. All rights reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE
--------------------------------------------------------------------------------
/javascript/InkRecognition/javascript-app/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - javascript
5 | ---
6 |
7 | # InkRecognizer Cognitive Service JavaScript Sample
8 | Ink Recognizer Cognitive Service provides recognition of digital ink. It takes the digital ink stroke data as input and provides a document tree with individual recognition units as output. This project has sample code to demonstrate a few ways developers can take advantage of the service.
9 |
10 |  
11 |
12 | ## Features
13 |
14 | This project provides the following features:
15 |
16 | * Capturing very basic inking input.
17 | * Creating the JSON payload using the Ink Recognizer Service JSON schema.
18 | * Asynchronously calling the Ink Recognizer REST APIs with the JSON payload
19 | * Parsing the JSON response from the service to build the document tree.
20 |
21 | ## Contents
22 |
23 | | File/folder | Description |
24 | |-------------|-------------|
25 | | `src` | Sample source code. |
26 | | `README.md` | This README file. |
27 | | `LICENSE` | The license for the sample. |
28 |
29 | ## Getting Started
30 |
31 | ### Prerequisites
32 | Request a subscription key.
33 |
34 | ### Quick Start
35 | 1. Clone or download this sample.
2 | In config.js, replace the empty subscription key with your valid subscription key.
37 | 3. Open sample.html in browser.
38 |
--------------------------------------------------------------------------------
/javascript/InkRecognition/javascript-app/src/config.js:
--------------------------------------------------------------------------------
1 | // Service endpoint configuration and unit-conversion constants for the Ink Recognizer sample.
2 |
3 | SERVER_ADDRESS = "https://api.cognitive.microsoft.com";
4 | ENDPOINT_URL = SERVER_ADDRESS + "/inkrecognizer/v1.0-preview/recognize";
5 | SUBSCRIPTION_KEY = ""; // TODO: paste your Ink Recognizer subscription key here before running the sample
6 |
7 | // Language tags the user can try with the recognizer
8 | LANGUAGE_TAGS_TO_TRY = ["en-US", "de-DE", "en-GB", "fr-FR", "hi-IN", "ja-JP", "ko-KR", "zh-CN"];
9 |
10 | // Window.devicePixelRatio could change, e.g., when user drags the window to a display with different pixel density,
11 | // however, there is no callback or event available to detect the change.
12 | // In this sample, we assume devicePixelRatio doesn't change.
13 | PIXEL_RATIO = window.devicePixelRatio;
14 | MILLIMETER_PER_INCH = 25.4; // physical millimeters per inch
15 | PIXEL_PER_INCH = 96; // CSS reference pixels per inch
16 | MILLIMETER_TO_PIXELS = PIXEL_PER_INCH / (MILLIMETER_PER_INCH * PIXEL_RATIO); // NOTE(review): appears to convert millimeters to pixels, discounting device pixel density — confirm intended direction
17 | PIXEL_TO_MILLIMETERS = MILLIMETER_PER_INCH * PIXEL_RATIO / PIXEL_PER_INCH; // reciprocal of MILLIMETER_TO_PIXELS
--------------------------------------------------------------------------------
/nodejs/Language/BingSpellCheckv7.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | 'use strict';
5 |
6 | const request = require ('request')
7 |
8 | /**
9 | * This sample uses the Bing Spell Check API to check the spelling of a sentence.
10 | * It returns the identified misspellings with suggestions for correctly spelled words
11 | * Plus, a score of what are deemed to be the best matches for suggestions.
12 | */
13 |
14 | // Add your Bing Spell Check key and endpoint (host) to your environment variables.
15 | // Note v5 and v7 require separate subscription keys.
16 | let key = process.env['BING_SPELL_CHECK_SUBSCRIPTION_KEY']
17 | let endpoint = process.env['BING_SPELL_CHECK_ENDPOINT'] + '/bing/v7.0/spellcheck/'
18 |
19 | let text = "Hollo, wrld!" // deliberately misspelled sample text
20 | let mode = "proof"
21 | let mkt = "en-US"
22 |
23 | // These values are used for optional headers (see below).
24 | // let CLIENT_ID = "";
25 | // let CLIENT_IP = "999.999.999.999";
26 | // let CLIENT_LOCATION = "+90.0000000000000;long: 00.0000000000000;re:100.000000000000";
27 |
28 | let headers = {
29 | 'Content-Type': 'application/x-www-form-urlencoded',
30 | 'Content-Length': text.length + 5, // NOTE(review): +5 presumably accounts for a "text=" form prefix, but the text is sent via qs below — confirm this header is needed
31 | 'Ocp-Apim-Subscription-Key': key
32 | // Optional Headers
33 | // 'X-Search-Location' : CLIENT_LOCATION,
34 | // 'X-MSEdge-ClientID' : CLIENT_ID,
35 | // 'X-MSEdge-ClientIP' : CLIENT_IP,
36 | }
37 |
38 | let request_params = {
39 | method: 'POST',
40 | url: endpoint,
41 | headers: headers,
42 | qs: {
43 | mode: mode,
44 | mkt: mkt,
45 | text: text
46 | },
47 | json: true
48 | }
49 |
50 | request(request_params, function (error, response, body) {
51 | console.error('error:', error)
52 | console.log('statusCode:', response && response.statusCode)
53 |
54 | // Print suggestions for each misspelled word (token)
55 | body.flaggedTokens.forEach(token => {
56 | console.log(token)
57 | })
58 | })
59 |
--------------------------------------------------------------------------------
/nodejs/Language/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - nodejs
5 | products:
6 | - azure
7 | - cognitive services
8 | ---
9 |
10 | # Language REST API samples
11 |
12 | These samples are quickstarts that show how to use various Language APIs, such as Bing Spell Check.
13 |
14 | ## Prerequisites
15 |
16 | - Create an [Azure resource](https://portal.azure.com) for the service you'd like to try, for example a Bing Spell Check resource.
17 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
18 | - Copy/paste the `.js` file into your project or text editor.
19 |
20 | ## Running the samples
21 | - Run from your IDE or the command line:
22 | `node BingSpellCheckv7.js`
23 |
24 | ## Resources
25 | #### Bing Spell Check: [Documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-spell-check/overview), [API](https://dev.cognitive.microsoft.com/docs/services/5f7d486e04d2430193e1ca8f760cd7ed/operations/57855119bca1df1c647bc358)
26 |
--------------------------------------------------------------------------------
/nodejs/Search/BingAutosuggestv7.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | 'use strict';
5 |
6 | let request = require ('request');
7 |
8 | /**
9 | * This sample uses the Bing Autosuggest API with a text search query
10 | * that returns website suggestions based on the word(s) submitted.
11 | */
12 |
13 | // Add your Bing Autosuggest subscription key and endpoint to your environment variables.
14 | let subscriptionKey = process.env['BING_AUTOSUGGEST_SUBSCRIPTION_KEY']
15 | let endpoint = process.env['BING_AUTOSUGGEST_ENDPOINT'] + '/bing/v7.0/Suggestions';
16 |
17 | // Search term
18 | let query = 'sail';
19 | // Market to perform the search
20 | let mkt = 'en-US'
21 |
22 | // Construct parameters
23 | let request_params = {
24 | method: 'GET',
25 | uri: endpoint,
26 | headers: { 'Ocp-Apim-Subscription-Key': subscriptionKey },
27 | qs: { q: query, mkt: mkt },
28 | json: true
29 | }
30 |
31 | // Make request
32 | request(request_params, function (error, response, body) {
33 | console.error('error:', error)
34 | console.log('statusCode:', response && response.statusCode)
35 |
36 | console.log(body)
37 | console.log()
38 | body.suggestionGroups.forEach( sugg => { // print each group of suggestions returned
39 | console.log(sugg)
40 | })
41 | })
42 |
--------------------------------------------------------------------------------
/nodejs/Search/BingCustomSearchv7.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | // Bing Custom Search quickstart.
5 | var request = require("request");
6 |
7 | /**
8 | * This sample uses the Bing Custom Search API to send a customized query
9 | * that returns a lot of data about the query with results.
10 | */
11 |
12 | // Add your Bing Custom Search subscription key and endpoint to your environment variables.
13 | var subscriptionKey = process.env['BING_CUSTOM_SEARCH_SUBSCRIPTION_KEY'];
14 | var endpoint = process.env['BING_CUSTOM_SEARCH_ENDPOINT'] + "/bingcustomsearch/v7.0/search?";
15 |
16 | var customConfigId = process.env['BING_CUSTOM_CONFIG']; // Custom Search configuration ID; "1" can reportedly also be used — TODO confirm against your Custom Search instance
17 |
18 | // Word(s) you want to search for.
19 | var query = 'Microsoft';
20 | // Market you want to search in.
21 | let mkt = 'en-US'
22 | //
23 |
24 | //
25 | // Construct parameters
26 | let request_params = {
27 | uri: endpoint,
28 | headers: {
29 | 'Ocp-Apim-Subscription-Key': subscriptionKey
30 | },
31 | qs: {
32 | customConfig: customConfigId,
33 | q: query,
34 | mkt: mkt
35 | },
36 | json: true
37 | }
38 | //
39 |
40 | //
41 | // Make request
42 | request(request_params, function (error, response, body) {
43 | console.error('error:', error)
44 | console.log('statusCode:', response && response.statusCode)
45 |
46 | console.log(body.queryContext)
47 | console.log()
48 | body.webPages.value.forEach(v => { // print each web page result
49 | console.log(v)
50 | })
51 | })
52 | //
53 |
--------------------------------------------------------------------------------
/nodejs/Search/BingEntitySearchv7.js:
--------------------------------------------------------------------------------
1 |
2 | 'use strict';
3 |
4 | let request = require ('request');
5 |
6 | /**
7 | * This sample uses the Bing Entity Search API to use an entity query to
8 | * get results with entity details like address and contact information.
9 | */
10 |
11 | // Add your Bing Entity Search subscription key and endpoint to your environment variables.
12 | let subscriptionKey = process.env['BING_ENTITY_SEARCH_SUBSCRIPTION_KEY']
13 | let endpoint = process.env['BING_ENTITY_SEARCH_ENDPOINT'] + '/bing/v7.0/entities';
14 |
15 | let mkt = 'en-US';
16 | let query = 'italian restaurant near me';
17 |
18 | // Alternative: build the query string by hand — let query = '?mkt=' + mkt + '&q=' + encodeURI(q);
19 |
20 | // Construct parameters
21 | let request_params = {
22 | method: 'GET',
23 | uri: endpoint,
24 | headers: {
25 | 'Ocp-Apim-Subscription-Key': subscriptionKey
26 | },
27 | qs: {
28 | q: query,
29 | mkt: mkt
30 | },
31 | json: true
32 | }
33 |
34 | // Make request
35 | request(request_params, function (error, response, body) {
36 | console.error('error:', error)
37 | console.log('statusCode:', response && response.statusCode)
38 |
39 | console.log(body.queryContext.originalQuery)
40 | console.log()
41 | body.places.value.forEach(entity => { // print each returned place entity
42 | console.log(entity)
43 | })
44 | })
45 |
--------------------------------------------------------------------------------
/nodejs/Search/BingImageSearchv7.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | 'use strict';
5 |
6 | let request = require('request');
7 |
8 | /**
9 | * This sample uses the Bing Image Search API to query a search topic
10 | * and return image results for that topic, along with metadata.
11 | */
12 |
13 | // Add your Bing Search V7 subscription key and endpoint to your environment variables.
14 | let subscriptionKey = process.env['BING_SEARCH_V7_SUBSCRIPTION_KEY']
15 | let endpoint = process.env['BING_SEARCH_V7_ENDPOINT'] + '/bing/v7.0/images/search';
16 |
17 | let query = 'puppies'; // search term
18 | let mkt = 'en-US' // market to search in
19 |
20 | // Construct parameters
21 | let request_params = {
22 | method: 'GET',
23 | uri: endpoint,
24 | headers: {
25 | 'Ocp-Apim-Subscription-Key': subscriptionKey
26 | },
27 | qs: {
28 | q: query,
29 | mkt: mkt
30 | },
31 | json: true
32 | }
33 |
34 | // Make request and print the raw JSON response
35 | request(request_params, function (error, response, body) {
36 | console.error('error:', error)
37 | console.log('statusCode:', response && response.statusCode)
38 | console.log('original query: ' + body.queryContext.originalQuery)
39 | console.log()
40 | console.log(body)
41 | })
42 |
--------------------------------------------------------------------------------
/nodejs/Search/BingImageSearchv7Quickstart.js:
--------------------------------------------------------------------------------
1 | //Copyright (c) Microsoft Corporation. All rights reserved.
2 | //Licensed under the MIT License.
3 |
4 | 'use strict';
5 |
6 | let https = require('https');
7 |
8 | // **********************************************
9 | // *** Update or verify the following values. ***
10 | // **********************************************
11 |
12 | // Add your Bing Search V7 subscription key to your environment variables.
13 | let subscriptionKey = process.env['BING_SEARCH_V7_SUBSCRIPTION_KEY']
14 |
15 | // Add your Bing Search V7 endpoint to your environment variables.
16 | let host = process.env['BING_SEARCH_V7_ENDPOINT']
17 | let path = '/bing/v7.0/images/search';
18 |
19 | let term = 'tropical ocean'; // search query
20 |
21 | let response_handler = function (response) { // accumulates the body, then prints details of the first image result
22 | let body = '';
23 | response.on('data', function (d) {
24 | body += d;
25 | });
26 | response.on('end', function () {
27 | let imageResults = JSON.parse(body);
28 | if (imageResults.value.length > 0) {
29 | let firstImageResult = imageResults.value[0];
30 | console.log(`Image result count: ${imageResults.value.length}`);
31 | console.log(`First image insightsToken: ${firstImageResult.imageInsightsToken}`);
32 | console.log(`First image thumbnail url: ${firstImageResult.thumbnailUrl}`);
33 | console.log(`First image web search url: ${firstImageResult.webSearchUrl}`);
34 | }
35 | else {
36 | console.log("Couldn't find image results!");
37 | }
38 |
39 |
40 |
41 | });
42 | response.on('error', function (e) {
43 | console.log('Error: ' + e.message);
44 | });
45 | };
46 |
47 | let bing_image_search = function (search) { // issues the GET request with the subscription key header
48 | console.log('Searching images for: ' + term);
49 | let request_params = {
50 | method : 'GET',
51 | hostname : host,
52 | path : path + '?q=' + encodeURIComponent(search),
53 | headers : {
54 | 'Ocp-Apim-Subscription-Key' : subscriptionKey,
55 | }
56 | };
57 |
58 | let req = https.request(request_params, response_handler);
59 | req.end();
60 | }
61 |
62 | if (subscriptionKey.length === 32) { // NOTE(review): throws a TypeError if the env var is unset; only proceeds for a 32-character key
63 | bing_image_search(term);
64 | } else {
65 | console.log('Invalid Bing Search API subscription key!');
66 | console.log('Please paste yours into the source code.');
67 | }
68 |
--------------------------------------------------------------------------------
/nodejs/Search/BingNewsSearchv7.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

'use strict';

let request = require('request');

/**
 * This sample uses the Bing News Search API to query the web with a
 * search term(s) and get relevant news sites in return.
 */

// Add your Bing Search V7 subscription key and endpoint to your environment variables.
let subscriptionKey = process.env['BING_SEARCH_V7_SUBSCRIPTION_KEY']
let endpoint = process.env['BING_SEARCH_V7_ENDPOINT'] + '/bing/v7.0/news/search';

// News topics you'd like to search for.
let query = 'Microsoft';
// Market you'd like to search in.
let mkt = 'en-US'

// Construct parameters: GET with query/market in the query string,
// authenticated via the subscription-key header; `json: true` makes the
// request library parse the response body for us.
let request_params = {
    method: 'GET',
    uri: endpoint,
    headers: {
        'Ocp-Apim-Subscription-Key': subscriptionKey
    },
    qs: {
        q: query,
        mkt: mkt
    },
    json: true
}

// Make request. Guard transport errors before touching `body`: on failure
// `body` is undefined and the original code threw a TypeError on
// body.queryContext.
request(request_params, function (error, response, body) {
    if (error) {
        console.error('error:', error)
        return
    }
    console.log('statusCode:', response && response.statusCode)
    if (body && body.queryContext) {
        console.log('original query: ' + body.queryContext.originalQuery)
    }
    console.log()
    console.log(body)
})
44 |
--------------------------------------------------------------------------------
/nodejs/Search/BingSpellCheckv7.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

'use strict';

const request = require ('request')

/**
 * This sample uses the Bing Spell Check API to check the spelling of a sentence.
 * It returns the identified misspellings with suggestions for correctly spelled words
 * Plus, a score of what are deemed to be the best matches for suggestions.
 */

// Add your Bing Spell Check key and endpoint (host) to your environment variables.
// Note v5 and v7 require separate subscription keys.
let key = process.env['BING_SPELL_CHECK_SUBSCRIPTION_KEY']
let endpoint = process.env['BING_SPELL_CHECK_ENDPOINT'] + '/bing/v7.0/spellcheck/'

// Text to check, the checking mode, and the market.
let text = "Hollo, wrld!"
let mode = "proof"
let mkt = "en-US"

// These values are used for optional headers (see below).
// let CLIENT_ID = "";
// let CLIENT_IP = "999.999.999.999";
// let CLIENT_LOCATION = "+90.0000000000000;long: 00.0000000000000;re:100.000000000000";

// NOTE: Content-Length is set automatically by the request library. The
// original hand-computed value (text.length + 5) was wrong, because the
// text is sent in the query string rather than the request body.
let headers = {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Ocp-Apim-Subscription-Key': key
    // Optional Headers
    // 'X-Search-Location' : CLIENT_LOCATION,
    // 'X-MSEdge-ClientID' : CLIENT_ID,
    // 'X-MSEdge-ClientIP' : CLIENT_IP,
}

let request_params = {
    method: 'POST',
    url: endpoint,
    headers: headers,
    qs: {
        mode: mode,
        mkt: mkt,
        text: text
    },
    json: true
}

request(request_params, function (error, response, body) {
    // Transport failure: `body` is undefined, so report and stop.
    if (error) {
        console.error('error:', error)
        return
    }
    console.log('statusCode:', response && response.statusCode)

    // Print suggestions for each misspelled word (token)
    if (body && body.flaggedTokens) {
        body.flaggedTokens.forEach(token => {
            console.log(token)
        })
    } else {
        // Error payloads have no flaggedTokens; show what came back.
        console.log(body)
    }
})
59 |
--------------------------------------------------------------------------------
/nodejs/Search/BingVideoSearchv7.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

'use strict';

let request = require('request');

/**
 * This sample uses the Bing Video Search API with a query text that returns
 * links to relevant videos and metadata about them.
 */

// Add your Bing Search V7 subscription key and endpoint to your environment variables.
let subscriptionKey = process.env['BING_SEARCH_V7_SUBSCRIPTION_KEY']
let endpoint = process.env['BING_SEARCH_V7_ENDPOINT'] + '/bing/v7.0/videos/search';

// Topic you'd like to search for.
let query = 'kittens';
// Market you'd like to search in.
let mkt = 'en-US'

// Construct parameters: query/market as query-string values, subscription
// key as a header, and automatic JSON parsing of the response.
let request_params = {
    method: 'GET',
    uri: endpoint,
    headers: {
        'Ocp-Apim-Subscription-Key': subscriptionKey
    },
    qs: {
        q: query,
        mkt: mkt
    },
    json: true
}

// Make request. On a transport error `body` is undefined; the original
// code crashed on body.queryContext, so guard first.
request(request_params, function (error, response, body) {
    if (error) {
        console.error('error:', error)
        return
    }
    console.log('statusCode:', response && response.statusCode)
    if (body && body.queryContext) {
        console.log('original query: ' + body.queryContext.originalQuery)
    }
    console.log()
    console.log(body)
})
44 |
--------------------------------------------------------------------------------
/nodejs/Search/BingVisualSearchv7.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

const request = require('request');
const FormData = require('form-data');
const fs = require('fs');

// Add your Bing Search V7 subscription key and endpoint to your environment variables.
let subscriptionKey = process.env['BING_SEARCH_V7_SUBSCRIPTION_KEY']
let baseUri = process.env['BING_SEARCH_V7_ENDPOINT'] + '/bing/v7.0/images/visualsearch';

let imagePath = "path-to-your-image";

// Build a multipart form holding the local image to upload.
let form = new FormData();
form.append("image", fs.createReadStream(imagePath));

form.getLength(function(err, length){
    if (err) {
        return requestCallback(err);
    }

    // POST the form. Assigning to the request's internal `_form` slot lets
    // the library generate the multipart boundary and headers for us.
    let r = request.post(baseUri, requestCallback);
    r._form = form;
    r.setHeader('Ocp-Apim-Subscription-Key', subscriptionKey);
});

// Prints the pretty-printed JSON response, or the error if the request
// (or the form-length computation above) failed. The original ignored
// `err` and crashed in JSON.parse(undefined) on any failure.
function requestCallback(err, res, body) {
    if (err) {
        console.error('error:', err);
        return;
    }
    console.log(JSON.stringify(JSON.parse(body), null, ' '))
}
30 |
--------------------------------------------------------------------------------
/nodejs/Search/BingWebSearchv7.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

'use strict';

let request = require('request');

/**
 * This sample uses the Bing Web Search API to get topic-specific webpages
 * back with a large amount of metadata.
 */

// Add your Bing Search V7 subscription key and endpoint to your environment variables.
let subscriptionKey = process.env['BING_SEARCH_V7_SUBSCRIPTION_KEY']
let endpoint = process.env['BING_SEARCH_V7_ENDPOINT'] + '/bing/v7.0/search';

// Search term(s) you'd like to search for.
let query = 'Microsoft Cognitive Services';
// Market you'd like to search in.
let mkt = 'en-US'

// Construct parameters: query/market in the query string, key as a header,
// response parsed as JSON.
let request_params = {
    method: 'GET',
    uri: endpoint,
    headers: {
        'Ocp-Apim-Subscription-Key': subscriptionKey
    },
    qs: {
        q: query,
        mkt: mkt
    },
    json: true
}

// Make request. Guard both transport errors and responses that carry no
// webPages collection (e.g. API error payloads) before iterating; the
// original threw a TypeError on body.webPages.value in those cases.
request(request_params, function (error, response, body) {
    if (error) {
        console.error('error:', error)
        return
    }
    console.log('statusCode:', response && response.statusCode)
    console.log('original query: ' + query)
    console.log()
    if (body && body.webPages && body.webPages.value) {
        body.webPages.value.forEach(webpage => {
            console.log(webpage)
        })
    } else {
        console.log(body)
    }
})
46 |
--------------------------------------------------------------------------------
/nodejs/Vision/AnalyzeRemoteImage.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

'use strict'

const request = require('request')

/**
 * This sample uses the Computer Vision API to analyze a remote image.
 * It returns image properties such as categories, description (tags), color, and size.
 *
 * Computer Vision API - v2.1:
 * https://westus.dev.cognitive.microsoft.com/docs/services/5cd27ec07268f6c679a3e641/operations/56f91f2e778daf14a499f20d
 */

// Add your Computer Vision subscription key and endpoint to your environment variables.
let subscriptionKey = process.env['COMPUTER_VISION_SUBSCRIPTION_KEY']
let endpoint = process.env['COMPUTER_VISION_ENDPOINT'] + '/vision/v2.1/analyze'

// Image to be analyzed; you may use your own URL image.
const url =
    'https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/objects.jpg'

// Request parameters.
const params = {
    'visualFeatures': 'Categories,Description,Color',
    'details': '',
    'language': 'en'
}

// Options. JSON.stringify produces a correctly escaped JSON body; the
// original built it by string concatenation, which breaks for any URL
// containing a double quote.
const options = {
    uri: endpoint,
    qs: params,
    body: JSON.stringify({ url: url }),
    headers: {
        'Content-Type': 'application/json',
        'Ocp-Apim-Subscription-Key' : subscriptionKey
    }
}

// Make the request; report transport errors before attempting to parse the
// (then undefined) response body.
request.post(options, (error, response, body) => {
    if (error) {
        console.error('error:', error)
        return
    }
    console.log('statusCode:', response && response.statusCode)
    console.log('original image:', url.substring(url.lastIndexOf('/') + 1))
    console.log()
    console.log(JSON.stringify(JSON.parse(body), null, 2))
})
50 |
--------------------------------------------------------------------------------
/nodejs/Vision/ComputerVisionOCR.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

'use strict';

let request = require('request')

/**
 * This sample uses the Computer Vision API to detect printed text in an image,
 * then returns the text and its properties, such as language, text angle, orientation.
 * Bounding boxes are calculated around portions of the text.
 *
 * Computer Vision API - v2.1:
 * https://westus.dev.cognitive.microsoft.com/docs/services/5cd27ec07268f6c679a3e641/operations/56f91f2e778daf14a499f20d
 */

// Add your Azure Computer Vision subscription key and endpoint to your environment variables.
let subscriptionKey = process.env['COMPUTER_VISION_SUBSCRIPTION_KEY']
let endpoint = process.env['COMPUTER_VISION_ENDPOINT'] + '/vision/v2.1/ocr'

// An image with printed text; or replace with your own.
let url = 'https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/printed_text.jpg'

// Request parameters.
const params = {
    'language': 'unk', // auto-detects language
    'detectOrientation': 'true',
};

// Options. JSON.stringify yields a properly escaped JSON body; the
// original concatenated strings, which breaks for URLs containing quotes.
let request_params = {
    method: 'POST',
    uri: endpoint,
    qs: params,
    body: JSON.stringify({ url: url }),
    headers: {
        'Content-Type': 'application/json',
        'Ocp-Apim-Subscription-Key': subscriptionKey
    }
}

// Make request; guard transport errors before JSON.parse on the body.
request(request_params, function (error, response, body) {
    if (error) {
        console.error('error:', error)
        return
    }
    console.log('statusCode:', response && response.statusCode)
    console.log('original image: ' + url.substring(url.lastIndexOf('/') + 1))
    console.log()
    console.log(JSON.stringify(JSON.parse(body), null, 2))
})
50 |
--------------------------------------------------------------------------------
/nodejs/Vision/FaceDetectFaces.js:
--------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

'use strict';

let request = require('request')

/**
 * This sample uses the Face API to detect faces from a remote image.
 *
 * Face API - V1.0:
 * https://westus.dev.cognitive.microsoft.com/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236
 */

// Add your Azure Face subscription key and endpoint to your environment variables.
let subscriptionKey = process.env['FACE_SUBSCRIPTION_KEY']
let endpoint = process.env['FACE_ENDPOINT'] + '/face/v1.0/detect'

// A remote image with several faces; you may replace with your own URL.
let url = 'https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/faces.jpg'

// Request parameters.
const params = {
    'returnFaceId': 'true',
    'returnFaceLandmarks': 'false',
    'returnFaceAttributes': 'age,gender,headPose,smile,facialHair,glasses,' +
        'emotion,hair,makeup,occlusion,accessories,blur,exposure,noise'
}

// Options. JSON.stringify builds a correctly escaped JSON body; the
// original used string concatenation, which breaks on quotes in the URL.
let request_params = {
    method: 'POST',
    uri: endpoint,
    qs: params,
    body: JSON.stringify({ url: url }),
    headers: {
        'Content-Type': 'application/json',
        'Ocp-Apim-Subscription-Key': subscriptionKey
    }
}

// Make request; guard transport errors before parsing the body.
request(request_params, function (error, response, body) {
    if (error) {
        console.error('error:', error)
        return
    }
    console.log('statusCode:', response && response.statusCode)
    console.log('original image: ' + url.substring(url.lastIndexOf('/') + 1))
    console.log()
    console.log(JSON.stringify(JSON.parse(body), null, 2))
})
50 |
--------------------------------------------------------------------------------
/nodejs/Vision/README.md:
--------------------------------------------------------------------------------
1 | # Vision REST API samples
2 |
3 | These samples are quickstarts that show how to use various Vision APIs, such as Face.
4 |
5 | ## Prerequisites
6 | - Create an [Azure resource](https://portal.azure.com) for the service you'd like to try, for example a Face resource.
7 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
8 |
9 | ## Running the samples
10 | Build/run in your IDE or from the command line, for example:
11 | `node ComputerVisionOCR.js`
12 |
13 | ## Resources
14 | - Computer Vision documentation:
15 | https://docs.microsoft.com/en-us/azure/cognitive-services/computer-vision/home
16 | - Computer Vision 2.1 API:
17 | https://westus.dev.cognitive.microsoft.com/docs/services/5cd27ec07268f6c679a3e641/operations/56f91f2e778daf14a499f21b
18 | - Face documentation:
19 | https://docs.microsoft.com/en-us/azure/cognitive-services/face/index
20 | - Face 1.0 API:
21 | https://docs.microsoft.com/en-us/azure/cognitive-services/face/apireference
22 |
--------------------------------------------------------------------------------
/php/Search/BingWebSearchv7.php:
--------------------------------------------------------------------------------
1 | array (
15 | 'header' => $headers,
16 | 'method' => 'GET'));
17 |
18 | // Perform the request and receive a response.
19 | $context = stream_context_create($options);
20 | $result = file_get_contents($url . "?q=" . urlencode($query), false, $context);
21 |
22 | // Extract Bing HTTP headers.
23 | $headers = array();
24 | foreach ($http_response_header as $k => $v) {
25 | $h = explode(":", $v, 2);
26 | if (isset($h[1]))
27 | if (preg_match("/^BingAPIs-/", $h[0]) || preg_match("/^X-MSEdge-/", $h[0]))
28 | $headers[trim($h[0])] = trim($h[1]);
29 | }
30 |
31 | return array($headers, $result);
32 | }
33 | // Validate the subscription key.
34 | if (strlen($accessKey) == 32) {
35 |
36 | print "Searching the Web for: " . $term . "\n";
37 | // Makes the request.
38 | list($headers, $json) = BingWebSearch($endpoint, $accessKey, $term);
39 |
40 | print "\nRelevant Headers:\n\n";
41 | foreach ($headers as $k => $v) {
42 | print $k . ": " . $v . "\n";
43 | }
44 | // Prints JSON encoded response.
45 | print "\nJSON Response:\n\n";
46 | echo json_encode(json_decode($json), JSON_PRETTY_PRINT);
47 |
48 | } else {
49 |
50 | print("Invalid Bing Search API subscription key!\n");
51 | print("Please paste yours into the source code.\n");
52 |
53 | }
54 | ?>
55 |
--------------------------------------------------------------------------------
/python/Language/BingSpellCheckv7.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import http.client
import json
import os
from pprint import pprint
import urllib.parse

'''
This sample uses the Bing Spell Check API to check the spelling of query words and then suggests corrections.
Bing Spell Check API: https://dev.cognitive.microsoft.com/docs/services/5f7d486e04d2430193e1ca8f760cd7ed/operations/57855119bca1df1c647bc358
'''

# Text to spell-check; the API returns flagged tokens with suggestions.
text = 'Hollo, wrld!'

params = {'mkt': 'en-US', 'mode': 'proof', 'text': text}

# Add your Bing Spell Check subscription key and endpoint to your environment variables.
key = os.environ['BING_SPELL_CHECK_SUBSCRIPTION_KEY']
host = os.environ['BING_SPELL_CHECK_ENDPOINT']
host = host.replace('https://', '')  # http.client expects a bare hostname
path = '/bing/v7.0/spellcheck'

headers = {'Ocp-Apim-Subscription-Key': key,
           'Content-Type': 'application/x-www-form-urlencoded'}

# The headers in the following example
# are optional but should be considered as required:
#
# X-MSEdge-ClientIP: 999.999.999.999
# X-Search-Location: lat: +90.0000000000000;long: 00.0000000000000;re:100.000000000000
# X-MSEdge-ClientID:

# POST the form-encoded parameters and print the parsed JSON response.
# try/finally ensures the connection is closed (the original leaked it).
conn = http.client.HTTPSConnection(host)
try:
    body = urllib.parse.urlencode(params)
    conn.request("POST", path, body, headers)
    response = conn.getresponse()
    pprint(json.loads(response.read()))
finally:
    conn.close()
40 |
--------------------------------------------------------------------------------
/python/Language/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - python
5 | products:
6 | - azure
7 | - cognitive services
8 | ---
9 |
10 | # Language REST API samples
11 |
12 | These samples are quickstarts that show how to use various Language APIs, such as Bing Spell Check.
13 |
14 | ## Prerequisites
15 |
16 | - Create an [Azure resource](https://portal.azure.com) for the service you'd like to try, for example a Bing Spell Check resource.
17 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
18 | - Copy/paste the `.py` file into your project or text editor.
19 |
20 | ## Running the samples
21 | - Run from your IDE or the command line, for example:
22 | `python BingSpellCheckv7.py`
23 |
24 | ## Resources
25 | #### Bing Spell Check: [Documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-spell-check/overview), [API](https://dev.cognitive.microsoft.com/docs/services/5f7d486e04d2430193e1ca8f760cd7ed/operations/57855119bca1df1c647bc358)
26 |
--------------------------------------------------------------------------------
/python/Search/BingAutosuggestv7.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# -*- coding: utf-8 -*-

import json
import os
import requests
from pprint import pprint

'''
This sample uses the Bing Autosuggest API to return suggested search queries for a partial query term.
Bing Autosuggest API: https://docs.microsoft.com/en-us/rest/api/cognitiveservices-bingsearch/bing-autosuggest-api-v7-reference
'''

# Add your Bing Autosuggest subscription key and endpoint to your environment variables.
subscription_key = os.environ['BING_AUTOSUGGEST_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_AUTOSUGGEST_ENDPOINT'] + '/bing/v7.0/Suggestions/'

# Construct the request: partial query term and market as query-string parameters.
mkt = 'en-US'
query = 'sail'
params = { 'q': query, 'mkt': mkt }
headers = { 'Ocp-Apim-Subscription-Key': subscription_key }

# Call the API and print the response headers plus the JSON body.
try:
    response = requests.get(endpoint, headers=headers, params=params)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception:
    # Re-raise with the original traceback intact.
    raise
38 |
--------------------------------------------------------------------------------
/python/Search/BingCustomSearchv7.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# You may need the below as well
# pip install pipenv
# pipenv install requests
#
import json
import os
from pprint import pprint
import requests

'''
This sample uses the Bing Custom Search API to search for a query topic and get back user-controlled web page results.
Bing Custom Search API: https://docs.microsoft.com/en-us/rest/api/cognitiveservices-bingsearch/bing-custom-search-api-v7-reference
'''

# Add your Bing Custom Search subscription key and endpoint to your environment variables.
# Your endpoint will have the form: https://<resource-name>.cognitiveservices.azure.com
subscriptionKey = os.environ['BING_CUSTOM_SEARCH_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_CUSTOM_SEARCH_ENDPOINT']
customConfigId = os.environ["BING_CUSTOM_CONFIG"]  # you can also use "1"
searchTerm = "microsoft"

# Issue the request. Passing the query via `params` lets requests URL-encode
# the search term and config id; the original concatenated them raw into the
# URL, which breaks for terms containing spaces or reserved characters.
url = endpoint + "/bingcustomsearch/v7.0/search"
r = requests.get(url,
                 headers={'Ocp-Apim-Subscription-Key': subscriptionKey},
                 params={'q': searchTerm, 'customconfig': customConfigId})
pprint(json.loads(r.text))
33 |
--------------------------------------------------------------------------------
/python/Search/BingEntitySearchv7.py:
--------------------------------------------------------------------------------
import json
import os
from pprint import pprint
import requests
import urllib.parse

'''
This sample uses the Bing Entity Search v7 to search for restaurants and return details about it.
Bing Entity Search API: https://westus2.dev.cognitive.microsoft.com/docs/services/7a3fb374be374859a823b79fd938cc65/operations/52069701a465405ab3286f82
'''

# Add your Bing Entity Search subscription key and endpoint to your environment variables.
subscription_key = os.environ['BING_ENTITY_SEARCH_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_ENTITY_SEARCH_ENDPOINT'] + '/bing/v7.0/entities'

# Entity you want to find
query = 'italian restaurants near me'

# Construct the request (requests accepts a pre-encoded query string).
mkt = 'en-US'
params = 'mkt=' + mkt + '&q=' + urllib.parse.quote(query)
headers = {'Ocp-Apim-Subscription-Key': subscription_key}

# Call the API and print the response headers plus the JSON body.
# NOTE: the original had an unreachable trailing pprint(json.loads(
# response.read())) after the re-raise; requests responses have no .read(),
# so that dead line has been removed.
try:
    response = requests.get(endpoint, headers=headers, params=params)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception:
    raise
37 |
--------------------------------------------------------------------------------
/python/Search/BingImageSearchv7.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# -*- coding: utf-8 -*-

import json
import os
from pprint import pprint
import requests

'''
Queries the Bing Image Search API with a text term and prints the response
headers along with the JSON payload describing the matching images.
Documentation: https: // docs.microsoft.com/en-us/azure/cognitive-services/bing-web-search/
'''

# Key and endpoint are read from the environment (Bing Search V7 resource).
subscriptionKey = os.environ['BING_SEARCH_V7_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_SEARCH_V7_ENDPOINT'] + "/bing/v7.0/images/search"

# What to look for, and in which market.
query = "puppies"
mkt = 'en-US'

# Authentication header and query-string parameters for the GET request.
headers = {'Ocp-Apim-Subscription-Key': subscriptionKey}
params = {'q': query, 'mkt': mkt}

# Issue the request; any HTTP error status is raised to the caller.
try:
    response = requests.get(endpoint, headers=headers, params=params)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception as ex:
    raise ex
40 |
--------------------------------------------------------------------------------
/python/Search/BingNewsSearchv7.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# -*- coding: utf-8 -*-

import json
import os
from pprint import pprint
import requests

'''
Queries the Bing News Search API with a text term and prints the response
headers along with the JSON payload of relevant news webpages.
Documentation: https: // docs.microsoft.com/en-us/azure/cognitive-services/bing-web-search/
'''

# Key and endpoint are read from the environment (Bing Search V7 resource).
subscriptionKey = os.environ['BING_SEARCH_V7_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_SEARCH_V7_ENDPOINT'] + "/bing/v7.0/news/search"

# News topic to search for, and the market to search in.
query = "Microsoft"
mkt = 'en-US'

# Authentication header and query-string parameters for the GET request.
headers = {'Ocp-Apim-Subscription-Key': subscriptionKey}
params = {'q': query, 'mkt': mkt}

# Issue the request; any HTTP error status is raised to the caller.
try:
    response = requests.get(endpoint, headers=headers, params=params)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception as ex:
    raise ex
39 |
--------------------------------------------------------------------------------
/python/Search/BingSpellCheckv7.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import json
import os
from pprint import pprint
import requests
import urllib.parse

'''
Sends a misspelled phrase to the Bing Spell Check API and prints the
response headers plus the JSON result: flagged tokens with scored
suggestions for correctly spelled replacements.
Bing Spell Check API: https://dev.cognitive.microsoft.com/docs/services/5f7d486e04d2430193e1ca8f760cd7ed/operations/57855119bca1df1c647bc358
'''

# Key and endpoint are read from the environment.
key = os.environ['BING_SPELL_CHECK_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_SPELL_CHECK_ENDPOINT'] + '/bing/v7.0/spellcheck'

# Query you want spell-checked.
query = 'Hollo, wrld!'

# Form-encode the market, mode, and text parameters for the request.
query_args = { 'mkt': 'en-US', 'mode': 'proof', 'text': query }
params = urllib.parse.urlencode(query_args)
headers = { 'Ocp-Apim-Subscription-Key': key,
            'Content-Type': 'application/x-www-form-urlencoded' }

# Optional headers
#
# X-MSEdge-ClientIP: 999.999.999.999
# X-Search-Location: lat: +90.0000000000000;long: 00.0000000000000;re:100.000000000000
# X-MSEdge-ClientID:

# Call the API
try:
    response = requests.get(endpoint, headers=headers, params=params)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception as ex:
    raise ex
46 |
--------------------------------------------------------------------------------
/python/Search/BingVideoSearchv7.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# -*- coding: utf-8 -*-

import json
import os
from pprint import pprint
import requests

'''
Queries the Bing Video Search API for a topic and prints the response
headers along with the JSON payload of relevant videos and their metadata.
Documentation: https: // docs.microsoft.com/en-us/azure/cognitive-services/bing-web-search/
'''

# Key and endpoint are read from the environment (Bing Search V7 resource).
subscriptionKey = os.environ['BING_SEARCH_V7_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_SEARCH_V7_ENDPOINT'] + "/bing/v7.0/videos/search"

# Topic to search for.
query = "kittens"

# Query-string parameters and request headers.
params = { "q": query }
headers = {
    'Content-Type': 'application/json',
    'Ocp-Apim-Subscription-Key': subscriptionKey
}

# Issue the GET request; any HTTP error status is raised to the caller.
try:
    response = requests.get(endpoint, headers=headers, params=params)
    response.raise_for_status()

    # Print results
    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception as ex:
    raise ex
43 |
--------------------------------------------------------------------------------
/python/Search/BingVisualSearchv7.py:
--------------------------------------------------------------------------------
# Download and install Python at https://www.python.org/
# Run the following in a command console window: pip3 install requests

import json
import os
from pprint import pprint
import requests

'''
This sample uses the Bing Visual Search API with a local, query image and returns several web links
and data of the exact image and/or similar images.
'''

# Add your Bing Search V7 subscriptionKey and endpoint to your environment variables.
endpoint = os.environ['BING_SEARCH_V7_ENDPOINT'] + '/bing/v7.0/images/visualsearch'
subscription_key = os.environ['BING_SEARCH_V7_SUBSCRIPTION_KEY']

image_path = 'MY-IMAGE'  # for example: my_image.jpg

# Construct the request
headers = {'Ocp-Apim-Subscription-Key': subscription_key}

# Call the API. The image is opened in a context manager so the file handle
# is always closed (the original leaked it), even if the request raises.
try:
    with open(image_path, 'rb') as image_file:
        # MY-IMAGE is the name of the image file (no extension)
        file = {'image': ('MY-IMAGE', image_file)}
        response = requests.post(endpoint, headers=headers, files=file)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception:
    raise
36 |
--------------------------------------------------------------------------------
/python/Search/BingWebSearchv7.py:
--------------------------------------------------------------------------------
#Copyright (c) Microsoft Corporation. All rights reserved.
#Licensed under the MIT License.

# -*- coding: utf-8 -*-

import json
import os
from pprint import pprint
import requests

# Queries the Bing Web Search API for a term and prints the response
# headers along with the JSON payload of matching webpages.

# Key and endpoint are read from the environment (Bing Search V7 resource).
subscription_key = os.environ['BING_SEARCH_V7_SUBSCRIPTION_KEY']
endpoint = os.environ['BING_SEARCH_V7_ENDPOINT'] + "/bing/v7.0/search"

# Search term(s), and the market to search in.
query = "Microsoft Cognitive Services"
mkt = 'en-US'

# Authentication header and query-string parameters for the GET request.
headers = { 'Ocp-Apim-Subscription-Key': subscription_key }
params = { 'q': query, 'mkt': mkt }

# Issue the request; any HTTP error status is raised to the caller.
try:
    response = requests.get(endpoint, headers=headers, params=params)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception as ex:
    raise ex
35 |
--------------------------------------------------------------------------------
/python/Vision/ComputerVision2.1.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import json
import os
from pprint import pprint
import requests

'''
Sends a remote image URL to the Computer Vision Analyze endpoint and prints
the response headers plus the JSON result for the requested visual features
(categories, description, color).
API: https://westus.dev.cognitive.microsoft.com/docs/services/5adf991815e1060e6355ad44/operations/587f2c6a154055056008f200
'''

# Key and endpoint are read from the environment (Computer Vision resource).
subscription_key = os.environ['COMPUTER_VISION_SUBSCRIPTION_KEY']
endpoint = os.environ['COMPUTER_VISION_ENDPOINT'] + "/vision/v2.1/analyze"

# Optional query-string parameters selecting the analysis to perform.
params = {
    'visualFeatures': 'Categories,Description,Color',
    'language': 'en',
}

# Request headers.
headers = {
    'Content-Type': 'application/json',
    'Ocp-Apim-Subscription-Key': subscription_key,
}

# Any image with objects will work.
body = {'url': 'https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/objects.jpg'}

# POST the image URL; any HTTP error status is raised to the caller.
try:
    response = requests.post(endpoint, headers=headers, params=params, json=body)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())
except Exception as ex:
    raise ex
46 |
--------------------------------------------------------------------------------
/python/Vision/Face1.0.py:
--------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import json
import requests
import os
from pprint import pprint

'''
This sample makes a call to the Face API with a URL image query to detect faces and features in an image.
Face API: https://westus.dev.cognitive.microsoft.com/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236
'''

# Add your Face subscription key and endpoint to your environment variables.
subscription_key = os.environ['FACE_SUBSCRIPTION_KEY']
endpoint = os.environ['FACE_ENDPOINT'] + '/face/v1.0/detect'

# Headers carry the payload content type and the subscription key.
request_headers = {
    'Content-Type': 'application/json',
    'Ocp-Apim-Subscription-Key': subscription_key,
}

# Query-string parameters selecting which face data the service returns.
request_params = {
    'returnFaceId': 'true',
    'returnFaceLandmarks': 'false',
    'returnFaceAttributes': 'age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise',
}

# The URL of a JPEG image of faces to analyze.
image_payload = {'url': 'https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/Face/images/test-image-person-group.jpg'}

try:
    # Call API.
    response = requests.post(endpoint, headers=request_headers,
                             params=request_params, json=image_payload)
    response.raise_for_status()

    print("\nHeaders:\n")
    print(response.headers)

    print("\nJSON Response:\n")
    pprint(response.json())

except Exception as e:
    print('Error:')
    print(e)
48 |
--------------------------------------------------------------------------------
/python/Vision/README.md:
--------------------------------------------------------------------------------
1 | # Vision REST API samples
2 |
3 | These samples are quickstarts that show how to use various Vision APIs, such as Face.
4 |
5 | ## Prerequisites
6 | - Create an [Azure resource](https://portal.azure.com) for the service you'd like to try, for example a Face resource.
7 | - Add your key and endpoint from your resource to your environment variables with the variable names suggested in the sample.
8 |
9 | ## Running the samples
10 | Build/run in your IDE or from the command line, for example:
11 | `python Face1.0.py`
12 |
13 | ## Resources
14 | - Computer Vision documentation:
15 | https://docs.microsoft.com/en-us/azure/cognitive-services/computer-vision/home
16 | - Computer Vision 2.1 API:
17 | https://westus.dev.cognitive.microsoft.com/docs/services/5cd27ec07268f6c679a3e641/operations/56f91f2e778daf14a499f21b
18 | - Face documentation:
19 | https://docs.microsoft.com/en-us/azure/cognitive-services/face/index
20 | - Face 1.0 API:
21 | https://docs.microsoft.com/en-us/azure/cognitive-services/face/apireference
22 |
--------------------------------------------------------------------------------
/ruby/Search/BingAutosuggestv7.rb:
--------------------------------------------------------------------------------
# Calls the Bing Autosuggest v7 API and pretty-prints the JSON response.
require 'net/https'
require 'uri'
require 'cgi'
require 'json'

# **********************************************
# *** Update or verify the following values. ***
# **********************************************

# Replace the subscriptionKey string value with your valid subscription key.
subscriptionKey = 'enter key here'

host = 'https://api.cognitive.microsoft.com'
path = '/bing/v7.0/Suggestions'

mkt = 'en-US'
query = 'sail'

# URL-encode the query so multi-word or special-character queries are sent
# correctly (matches the other Bing samples, e.g. BingEntitySearchv7.rb).
params = '?mkt=' + mkt + '&q=' + CGI.escape(query)
uri = URI(host + path + params)

request = Net::HTTP::Get.new(uri)
request['Ocp-Apim-Subscription-Key'] = subscriptionKey

response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
  http.request(request)
end

puts JSON.pretty_generate(JSON.parse(response.body))
--------------------------------------------------------------------------------
/ruby/Search/BingEntitySearchv7.rb:
--------------------------------------------------------------------------------
# Queries the Bing Entity Search v7 API and pretty-prints the JSON response.
require 'net/https'
require 'cgi'
require 'json'

# **********************************************
# *** Update or verify the following values. ***
# **********************************************

# Replace the subscriptionKey string value with your valid subscription key.
subscriptionKey = 'ENTER KEY HERE'

host = 'https://api.cognitive.microsoft.com'
path = '/bing/v7.0/entities'

mkt = 'en-US'
query = 'italian restaurants near me'

# Build the request URL with the market code and the URL-encoded query.
uri = URI("#{host}#{path}?mkt=#{mkt}&q=#{CGI.escape(query)}")

request = Net::HTTP::Get.new(uri)
request['Ocp-Apim-Subscription-Key'] = subscriptionKey

response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
  http.request(request)
end

puts JSON.pretty_generate(JSON.parse(response.body))
--------------------------------------------------------------------------------
/ruby/Search/BingImageSearchv7.rb:
--------------------------------------------------------------------------------
# Searches the Bing Image Search v7 API and prints summary results.
require 'net/https'
require 'uri'
require 'json'

# **********************************************
# *** Update or verify the following values. ***
# **********************************************

# Replace the accessKey string value with your valid access key.
accessKey = "enter your key here"

# Verify the endpoint URI. At this writing, only one endpoint is used for Bing
# search APIs. In the future, regional endpoints may be available. If you
# encounter unexpected authorization errors, double-check this value against
# the endpoint for your Bing Search instance in your Azure dashboard.

uri = "https://api.cognitive.microsoft.com"
path = "/bing/v7.0/images/search"

term = "tropical ocean"

if accessKey.length != 32 then
    puts "Invalid Bing Search API subscription key!"
    puts "Please paste yours into the source code."
    abort
end

# URI.escape was deprecated and removed in Ruby 3.0; use the supported
# URI.encode_www_form_component to encode the query term.
uri = URI(uri + path + "?q=" + URI.encode_www_form_component(term))

puts "Searching images for: " + term

request = Net::HTTP::Get.new(uri)
request['Ocp-Apim-Subscription-Key'] = accessKey

response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
    http.request(request)
end

puts "\nJSON Response:\n\n"

parsed_json = JSON.parse(response.body)
total_returned_images = parsed_json["totalEstimatedMatches"]
first_result = parsed_json["value"][0]["thumbnailUrl"]

puts "total number of returned matches: #{total_returned_images}"
# Typo fixed in the user-facing message: "thumnail" -> "thumbnail".
puts "Url to the thumbnail of the first returned search result: #{first_result}"
--------------------------------------------------------------------------------
/ruby/Search/BingNewsSearchv7.rb:
--------------------------------------------------------------------------------
# Searches the Bing News Search v7 API and prints headers plus the JSON body.
require 'net/https'
require 'uri'
require 'json'

# **********************************************
# *** Update or verify the following values. ***
# **********************************************

# Replace the accessKey string value with your valid access key.
accessKey = "enter key here"

# Verify the endpoint URI. At this writing, only one endpoint is used for Bing
# search APIs. In the future, regional endpoints may be available. If you
# encounter unexpected authorization errors, double-check this value against
# the endpoint for your Bing Search instance in your Azure dashboard.

uri = "https://api.cognitive.microsoft.com"
path = "/bing/v7.0/news/search"

term = "Microsoft"

# URI.escape was deprecated and removed in Ruby 3.0; use the supported
# URI.encode_www_form_component to encode the query term.
uri = URI(uri + path + "?q=" + URI.encode_www_form_component(term))

puts "Searching news for: " + term

request = Net::HTTP::Get.new(uri)
request['Ocp-Apim-Subscription-Key'] = accessKey

response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
    http.request(request)
end

puts "\nRelevant Headers:\n\n"
response.each_header do |key, value|
    # header names are coerced to lowercase
    if key.start_with?("bingapis-") or key.start_with?("x-msedge-") then
        puts key + ": " + value
    end
end

puts "\nJSON Response:\n\n"
puts JSON::pretty_generate(JSON(response.body))
--------------------------------------------------------------------------------
/ruby/Search/BingSpellCheckv7.rb:
--------------------------------------------------------------------------------
# Calls the Bing Spell Check v7 API and pretty-prints the JSON response.
require 'net/http'
require 'uri'
require 'json'

uri = URI('https://api.cognitive.microsoft.com/bing/v7.0/spellcheck')

# Assigning uri.query REPLACES any existing query string, so every parameter
# must be passed through encode_www_form together. The original sample baked
# "?mkt=en-us&mode=proof" into the path and this assignment silently dropped
# both parameters.
uri.query = URI.encode_www_form({
    # Request parameters
    'mkt'  => 'en-us',
    'mode' => 'proof',
    'text' => 'Hollo, wrld!'
})

# NOTE: Replace this example key with a valid subscription key.
key = 'ENTER KEY HERE'

# The headers in the following example
# are optional but should be considered as required:
#
# X-MSEdge-ClientIP: 999.999.999.999
# X-Search-Location: lat: +90.0000000000000;long: 00.0000000000000;re:100.000000000000
# X-MSEdge-ClientID:
#

request = Net::HTTP::Post.new(uri)
request['Content-Type'] = "application/x-www-form-urlencoded"

request['Ocp-Apim-Subscription-Key'] = key

response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
    http.request(request)
end

result = JSON.pretty_generate(JSON.parse(response.body))
puts result
--------------------------------------------------------------------------------
/ruby/Search/BingVideoSearchv7.rb:
--------------------------------------------------------------------------------
# Searches the Bing Video Search v7 API and prints headers plus the JSON body.
require 'net/https'
require 'uri'
require 'json'

# **********************************************
# *** Update or verify the following values. ***
# **********************************************

# Replace the accessKey string value with your valid access key.
accessKey = "enter key here"

# Verify the endpoint URI. At this writing, only one endpoint is used for Bing
# search APIs. In the future, regional endpoints may be available. If you
# encounter unexpected authorization errors, double-check this value against
# the endpoint for your Bing Search instance in your Azure dashboard.

uri = "https://api.cognitive.microsoft.com"
path = "/bing/v7.0/videos/search"

term = "kittens"

# URI.escape was deprecated and removed in Ruby 3.0; use the supported
# URI.encode_www_form_component to encode the query term.
uri = URI(uri + path + "?q=" + URI.encode_www_form_component(term))

puts "Searching videos for: " + term

request = Net::HTTP::Get.new(uri)
request['Ocp-Apim-Subscription-Key'] = accessKey

response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
    http.request(request)
end

puts "\nRelevant Headers:\n\n"
response.each_header do |key, value|
    # header names are coerced to lowercase
    if key.start_with?("bingapis-") or key.start_with?("x-msedge-") then
        puts key + ": " + value
    end
end

puts "\nJSON Response:\n\n"
puts JSON::pretty_generate(JSON(response.body))
--------------------------------------------------------------------------------
/ruby/Search/BingWebSearchv7.rb:
--------------------------------------------------------------------------------
# Searches the Bing Web Search v7 API and prints headers plus the JSON body.
require 'net/https'
require 'uri'
require 'json'

# Replace with a valid subscription key from your Azure account.
accessKey = "enter key here"
uri = "https://api.cognitive.microsoft.com"
path = "/bing/v7.0/search"

term = "Microsoft Cognitive Services"

# Validate the subscription key.
if accessKey.length != 32 then
    puts "Invalid Bing Search API subscription key!"
    puts "Please paste yours into the source code."
    abort
end

# Construct the endpoint uri. URI.escape was deprecated and removed in
# Ruby 3.0; use the supported URI.encode_www_form_component instead.
uri = URI(uri + path + "?q=" + URI.encode_www_form_component(term))

puts "Searching the Web for: " + term

# Create the request.
request = Net::HTTP::Get.new(uri)
request['Ocp-Apim-Subscription-Key'] = accessKey

# Get the response.
response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
    http.request(request)
end

puts "\nRelevant Headers:\n\n"
response.each_header do |key, value|
    # Header names are lower-cased.
    if key.start_with?("bingapis-") or key.start_with?("x-msedge-") then
        puts key + ": " + value
    end
end

# Print the response.
puts "\nJSON Response:\n\n"
puts JSON::pretty_generate(JSON(response.body))
43 |
--------------------------------------------------------------------------------
/swift/InkRecognition/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | topic: sample
3 | languages:
4 | - swift
5 | products:
6 | - ios
7 | - azure
8 | ---
9 |
10 | # Ink Recognizer Cognitive Service Swift (on iOS) Sample
11 |  
12 |
13 | Ink Recognizer Cognitive Service provides recognition of digital ink. It takes the digital ink stroke data as input and provides a document tree with individual recognition units as output. This project has sample code to demonstrate a few ways developers can take advantage of the service.
14 |
15 | ## Features
16 |
17 | This project provides the following features:
18 |
19 | * Capturing very basic inking input.
20 | * Creating the JSON payload using the Ink Recognizer JSON schema.
21 | * Asynchronously calling the Ink Recognizer REST APIs with the JSON payload
22 | * Parsing the JSON response from the service to build the document tree.
23 |
24 | ## Contents
25 |
26 | | File/folder | Description |
27 | |-------------|-------------|
28 | | `Recognizer` | Sample source code. |
29 | | `README.md` | This README file. |
30 | | `LICENSE` | The license for the sample. |
31 |
32 | ## Getting Started
33 |
34 | ### Prerequisites
35 |
36 | Xcode 10 on macOS Mojave
37 |
38 |
39 | ### Quickstart
40 | (Add steps to get up and running quickly)
41 |
42 | 1. git clone https://github.com/Azure-Samples/cognitive-services-REST-api-samples
43 | 2. cd [repository name]
44 | 3. Navigate to the project directory (`swift/InkRecognition`) and select the Xcode project file.
45 | 4. Replace the placeholder subscription key in `InkRendererView.swift` with your valid subscription key
46 |
47 | ## Demo
48 |
49 | 1. Select a virtual device in Xcode
50 | 2. Build and run the project
51 | 3. Write something on the screen of the virtual device once it is loaded.
52 | 4. After 2 seconds of inactivity, the ink will be recognized and the result will be visible in the text control at the bottom of the screen.
53 |
54 |
55 | ## Resources
56 |
57 | Additional resources related to the project are listed below:
58 |
59 | - [Learn more about Ink Recognizer](http://go.microsoft.com/fwlink/?LinkID=2084782)
60 | - [Ink Recognizer API Reference](http://go.microsoft.com/fwlink/?LinkID=2085147)
61 | - [Ink Recognizer JavaScript sample](https://github.com/azure-samples/cognitive-services-javascript-ink-recognition)
62 | - [Ink Recognizer UWP sample](https://github.com/azure-samples/cognitive-services-csharp-ink-recognition)
63 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer.xcodeproj/project.xcworkspace/xcuserdata/elijah.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/cognitive-services-REST-api-samples/4a3c5ca76318feb3f7cfb109e1f79c542eb14d01/swift/InkRecognition/Recognizer/Recognizer.xcodeproj/project.xcworkspace/xcuserdata/elijah.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer.xcodeproj/xcuserdata/elijah.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | SchemeUserState
6 |
7 | Recognizer.xcscheme_^#shared#^_
8 |
9 | orderHint
10 | 0
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/AppDelegate.swift:
--------------------------------------------------------------------------------
1 |
2 | import UIKit
3 |
// Application delegate for the Ink Recognizer sample app. All lifecycle hooks
// below are the unmodified Xcode-template stubs; the app keeps no state here
// beyond the main window.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    var window: UIWindow?


    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.
        return true
    }

    func applicationWillResignActive(_ application: UIApplication) {
        // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
        // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
    }

    func applicationDidEnterBackground(_ application: UIApplication) {
        // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
        // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
    }

    func applicationWillEnterForeground(_ application: UIApplication) {
        // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
    }

    func applicationDidBecomeActive(_ application: UIApplication) {
        // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
    }

    func applicationWillTerminate(_ application: UIApplication) {
        // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
    }


}
39 |
40 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | $(DEVELOPMENT_LANGUAGE)
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleVersion
20 | 1
21 | LSRequiresIPhoneOS
22 |
23 | UILaunchStoryboardName
24 | LaunchScreen
25 | UIMainStoryboardFile
26 | Main
27 | UIRequiredDeviceCapabilities
28 |
29 | armv7
30 |
31 | UISupportedInterfaceOrientations
32 |
33 | UIInterfaceOrientationPortrait
34 | UIInterfaceOrientationLandscapeLeft
35 | UIInterfaceOrientationLandscapeRight
36 |
37 | UISupportedInterfaceOrientations~ipad
38 |
39 | UIInterfaceOrientationPortrait
40 | UIInterfaceOrientationPortraitUpsideDown
41 | UIInterfaceOrientationLandscapeLeft
42 | UIInterfaceOrientationLandscapeRight
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkBullet.swift:
--------------------------------------------------------------------------------
1 |
2 | import Foundation
3 |
// Recognition unit representing a bullet glyph parsed from the service response.
@objc
class InkBullet: InkRecognitionUnit {

    // Recognized text for the bullet; "" when the response supplied none.
    var text: String!

    @objc
    override init(json: [String: Any]) {
        if let recognized = json["recognizedText"] as? String {
            self.text = recognized
        } else {
            self.text = ""
        }
        super.init(json: json)
    }
}
15 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkLine.swift:
--------------------------------------------------------------------------------
1 |
2 | import Foundation
// Recognition unit representing a line of ink, with its recognized text and
// any alternate recognitions returned by the service.
@objc
class InkLine: InkRecognitionUnit {

    // Raw "alternates" dictionaries from the service response.
    var alternates = [[String: Any]]()
    var indentLevel = 0.0
    // Best recognized text for the line; "" when the response supplied none.
    var text: String!

    @objc
    override init(json: [String: Any]) {
        self.text = json["recognizedText"] as? String ?? ""
        self.alternates = json["alternates"] as? [[String: Any]] ?? []
        super.init(json: json)
    }

    // Returns the alternate recognized strings for this line
    // (missing/mis-typed entries become "").
    @objc
    func getAlternates() -> [String] {
        return self.alternates.map { $0["recognizedString"] as? String ?? "" }
    }
}
31 |
32 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkListItem.swift:
--------------------------------------------------------------------------------
1 |
2 | import Foundation
3 |
// Recognition unit representing a list item. Adds no fields of its own; the
// base class parses the shared JSON attributes.
@objc
class InkListItem: InkRecognitionUnit {

}
8 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkParagraph.swift:
--------------------------------------------------------------------------------
1 |
2 | import Foundation
3 |
// Recognition unit representing a paragraph. Adds no fields of its own; the
// base class parses the shared JSON attributes.
@objc
class InkParagraph: InkRecognitionUnit {

}
8 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkPoint.swift:
--------------------------------------------------------------------------------
1 |
2 |
3 | import Foundation
4 | import UIKit
5 |
// A single ink point, stored in millimeters (the unit the recognizer expects).
@objc
class InkPoint: NSObject, Decodable {
    var x: Float = 0
    var y: Float = 0

    // Display constants for the iPhone XR. Change these to match your target
    // devices (ideally via a lookup table of per-device values).
    static let iPhoneXRPPI: Float = 326.0
    static let iPhoneXRScalingFactor: Float = 2.0
    static let mmPerInch: Float = 25.4

    @objc
    init(x: Float, y: Float) {
        self.x = x
        self.y = y
    }

    // Converts a screen point to millimeters: scale to pixels, then to mm.
    @objc
    init(point: CGPoint) {
        self.x = Float(point.x)*InkPoint.iPhoneXRScalingFactor/InkPoint.iPhoneXRPPI*InkPoint.mmPerInch
        self.y = Float(point.y)*InkPoint.iPhoneXRScalingFactor/InkPoint.iPhoneXRPPI*InkPoint.mmPerInch
    }

    // Converts a millimeter value back to a CGFloat pixel value for drawing.
    @objc
    static func millimeterToCGFloat(mmValue: Float) -> CGFloat {
        return CGFloat(mmValue/InkPoint.mmPerInch*InkPoint.iPhoneXRPPI)
    }
}
33 |
34 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkStroke.swift:
--------------------------------------------------------------------------------
1 |
2 | import Foundation
3 |
// Classification of a stroke; mapped by InkStroke onto the string values the
// Ink Recognizer service understands ("inkDrawing"/"inkWriting").
@objc
enum StrokeKind: Int {
    case drawing,
    writing,
    unknown
}
10 |
// One ink stroke: an id, a language, and the captured points serialized into
// the comma-separated "x1,y1,x2,y2,..." string the service request expects.
@objc
class InkStroke: NSObject, Encodable {
    var language: String = "en-US"
    var id: Int!
    var kind: StrokeKind
    var inkPoints = [InkPoint]()
    // Serialized point list, kept in sync by addPoint(point:).
    var points: String = ""
    // Monotonic counter used to hand out unique stroke ids (not thread-safe;
    // strokes are created on the UI thread in this sample).
    static var num: Int = 0
    var strKind: String = "unknown"

    // The Ink Recognizer request schema names the stroke-classification field
    // "kind", so map the internal `strKind` property onto that key. (The
    // previous `CodingKeys: CodingKey` enum used the case name and emitted a
    // nonstandard "strKind" key — verify against the service schema.)
    enum CodingKeys: String, CodingKey {
        case language
        case id
        case points
        case strKind = "kind"
    }

    @objc
    init( language: String, kind: StrokeKind = StrokeKind.unknown) {
        id = InkStroke.getNextNumber()
        self.language = language
        self.kind = kind
        switch kind {
        case StrokeKind.drawing:
            strKind = "inkDrawing"
        case StrokeKind.writing:
            strKind = "inkWriting"
        default:
            print("kind is unknown")
        }
    }

    // Appends a point, extending the serialized `points` string (comma-joined).
    @objc
    func addPoint(point: InkPoint) {
        inkPoints.append(point)
        if points == "" {
            points.append(String(Float(point.x)) + "," + String(Float(point.y)))
        } else {
            points.append("," + String(Float(point.x)) + "," + String(Float(point.y)))
        }
    }

    // Encodes only the fields the service consumes; `inkPoints` stays local.
    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        try container.encode(language, forKey: .language)
        try container.encode(id, forKey: .id)
        try container.encode(points, forKey: .points)
        try container.encode(strKind, forKey: .strKind)

    }

    // Returns the next unique stroke id.
    static func getNextNumber() -> Int {
        num += 1
        return num
    }
}
68 |
69 |
70 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkWord.swift:
--------------------------------------------------------------------------------
1 |
2 |
3 | import Foundation
4 |
// Recognition unit representing a word, with its recognized text and any
// alternate recognitions returned by the service.
@objc
class InkWord: InkRecognitionUnit {

    // Raw "alternates" dictionaries from the service response.
    var alternates = [[String: Any]]()
    // Best recognized text for the word; "" when the response supplied none.
    var text: String!

    @objc
    override init(json: [String: Any]) {
        self.text = json["recognizedText"] as? String ?? ""
        // Safe cast with a default: the previous force cast (as!) crashed when
        // the response omitted "alternates"; this now matches InkLine's handling.
        // (A leftover debug print(json) was also removed.)
        self.alternates = json["alternates"] as? [[String: Any]] ?? []
        super.init(json: json)
    }

    // Returns the alternate recognized strings for this word.
    @objc
    func getAlternates() -> [String] {
        var wordAlternates = [String]()
        for jsonAlternate in self.alternates {
            let wordAlternate = jsonAlternate["recognizedString"] as? String ?? ""
            wordAlternates.append(wordAlternate)
        }
        return wordAlternates
    }
}
29 |
30 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/InkWritingRegion.swift:
--------------------------------------------------------------------------------
1 |
2 |
3 | import Foundation
// Recognition unit representing a writing region. Adds no fields of its own;
// the base class parses the shared JSON attributes.
@objc
class InkWritingRegion: InkRecognitionUnit {

}
8 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/Line.swift:
--------------------------------------------------------------------------------
1 |
2 | import UIKit
3 |
// A straight segment between two points. Reference semantics are kept because
// existing callers may share/mutate instances — TODO confirm before making it a struct.
class Line {
    var start: CGPoint
    var end: CGPoint

    init(start: CGPoint, end: CGPoint) {
        self.start = start
        self.end = end
    }
}
13 |
--------------------------------------------------------------------------------
/swift/InkRecognition/Recognizer/Recognizer/ViewController.swift:
--------------------------------------------------------------------------------
1 |
2 | import UIKit
3 |
// Root view controller; hosts the label that shows the recognition result.
class ViewController: UIViewController {

    // Label populated with the recognized text.
    @IBOutlet weak var recognitionResult: UILabel!

    override func viewDidLoad() {
        // Call super before configuring the view, per UIKit convention
        // (the original configured the outlet first, then called super).
        super.viewDidLoad()
        self.recognitionResult.text = ""
    }
}
12 |
13 |
--------------------------------------------------------------------------------