├── .github ├── CODE_OF_CONDUCT.md ├── ISSUE_TEMPLATE.md └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE.md ├── README.md ├── cpp ├── ComputerVision │ └── ImageAnalysisQuickstart-single-4-0.cpp └── speech │ └── speaker-recognition.cpp ├── curl ├── FormRecognizer │ └── v3-rest-sdk-read-output.md ├── QnAMaker │ ├── create-knowledge-base.sh │ ├── delete-knowledge-base.sh │ ├── get-answer-from-runtime.sh │ ├── get-operation-status.sh │ ├── get-query-endpoint-key.sh │ ├── publish-knowledge-base.sh │ └── readme.md ├── content-moderator │ └── quickstart.sh ├── custom-vision │ ├── image-classifier.sh │ └── object-detector.sh ├── face │ ├── detect.ps1 │ └── detect.sh ├── management.md └── speech │ ├── get-voices.sh │ ├── speaker-recognition.sh │ ├── speech-to-text.sh │ └── text-to-speech.sh ├── dotnet ├── BingAutoSuggest │ └── Program.cs ├── BingSpellCheck │ └── BingSpellCheckQuickstart.cs ├── ComputerVision │ ├── 4-0 │ │ ├── image-analysis-how-to │ │ │ ├── Program.cs │ │ │ ├── image-analysis-how-to.csproj │ │ │ └── sample.jpg │ │ └── image-analysis-quickstart │ │ │ ├── Program.cs │ │ │ └── image-analysis-quickstart.csproj │ ├── ComputerVisionQuickstart-single.cs │ ├── ComputerVisionQuickstart.cs │ ├── ImageAnalysisQuickstart-single.cs │ ├── ImageAnalysisQuickstart.cs │ ├── ImageCaptioningQuickstart.cs │ ├── REST │ │ ├── CSharp-analyze.md │ │ ├── CSharp-hand-text.md │ │ ├── CSharp-print-text.md │ │ └── CSharp-thumb.md │ └── storage-lab-tutorial │ │ ├── css │ │ └── lightbox.css │ │ └── scripts │ │ └── lightbox.js ├── ContentModerator │ └── Program.cs ├── CustomVision │ ├── ImageClassification │ │ └── Program.cs │ └── ObjectDetection │ │ └── Program.cs ├── Face │ ├── Detect.cs │ ├── FindSimilar.cs │ ├── Quickstart.cs │ ├── cognsvcsdk │ │ ├── FaceQuickstart-single.cs │ │ ├── FaceQuickstart.cs │ │ ├── analyze.cs │ │ ├── detect.cs │ │ └── identify.cs │ └── rest │ │ └── detect.cs ├── FormRecognizer │ ├── FormRecognizerQuickstart-preview.cs │ ├── FormRecognizerQuickstart.cs │ ├── csharp-sdk-quickstart.cs │ ├── how-to-guide │ │ ├── business-card-model-output.md │ │ ├── general-document-model-output.md │ │ ├── id-document-model-output.md │ │ ├── invoice-model-output.md │ │ ├── layout-model-output.md │ │ ├── read-model-output.md │ │ ├── receipt-model-output.md │ │ └── w2-tax-model-output.md │ ├── v0.8.0-project │ │ ├── App.config │ │ ├── FormRecognizer.csproj │ │ ├── FormRecognizer.sln │ │ ├── Program.cs │ │ ├── Properties │ │ │ └── AssemblyInfo.cs │ │ └── packages.config │ ├── v3-csharp-quickstart-general-document-output.md │ ├── v3-csharp-quickstart-layout-output.md │ ├── v3-csharp-quickstart-prebuilt-invoice-output.md │ └── v3-csharp-quickstart-read-output.md ├── LanguageUnderstanding │ ├── authoring │ │ └── authoring-with-sdk.cs │ ├── csharp-model-with-rest │ │ ├── Program.cs │ │ └── csharp-model-with-rest.csproj │ ├── csharp-predict-with-rest │ │ ├── Program.cs │ │ └── csharp-predict-with-rest.csproj │ ├── predict-with-sdk-3x │ │ ├── Program.cs │ │ ├── predict-with-sdk-3x.csproj │ │ └── readme.md │ └── sdk-3x │ │ ├── .devcontainer │ │ ├── Dockerfile │ │ └── devcontainer.json │ │ ├── LUIS_SDK_3x.csproj │ │ └── Program.cs ├── Personalizer │ ├── multislot-quickstart-v2PreviewSdk │ │ ├── Program.cs │ │ ├── README.md │ │ └── multislot-quickstart.csproj │ ├── quickstart-sdk │ │ └── personalizer-quickstart.cs │ ├── singleslot-quickstart-v1Sdk │ │ ├── Program.cs │ │ ├── README.md │ │ └── personalizer.csproj │ └── singleslot-quickstart-v2PreviewSdk │ │ ├── Program.cs 
│ │ ├── README.md │ │ └── personalizer.csproj ├── QnAMaker │ ├── Preview-sdk-based-quickstart │ │ ├── Program.cs │ │ └── console.csproj │ ├── SDK-based-quickstart │ │ ├── Program.cs │ │ └── console.csproj │ ├── rest-based-quickstart │ │ ├── Program.cs │ │ └── rest-based-quickstart.csproj │ ├── rest │ │ ├── create-kb.cs │ │ ├── publish-kb.cs │ │ └── query-kb.cs │ └── sdk │ │ ├── ConsoleApp1.csproj │ │ └── Program.cs ├── TextAnalytics │ └── program.cs └── azure_management_service │ ├── create_delete_resource.cs │ └── create_delete_resource_preview.cs ├── go ├── AnomalyDetector │ ├── AnomalyDetectorQuickstart.go │ └── request-data.csv ├── BingAutoSuggest │ └── BingAutoSuggestQuickstart.go ├── BingCustomSearch │ └── quickstart.go ├── BingLocal │ └── quickstart.go ├── BingSpellCheck │ └── BingSpellCheckQuickstart.go ├── ComputerVision │ ├── ComputerVisionQuickstart-single.go │ ├── ComputerVisionQuickstart.go │ ├── ImageAnalysisQuickstart-single.go │ ├── ImageAnalysisQuickstart.go │ └── REST │ │ ├── go-analyze.md │ │ ├── go-print-text.md │ │ └── go-thumb.md ├── ContentModerator │ ├── ContentModeratorQuickstart.go │ └── content_moderator_text_moderation.txt ├── Face │ ├── cognsvcsdk │ │ ├── FaceQuickstart.go │ │ └── README.md │ └── rest │ │ └── detect.go ├── LUIS │ ├── go-rest-model │ │ └── model.go │ ├── go-rest-predict │ │ └── predict.go │ ├── prediction │ │ └── quickstart.go │ └── quickstart.go ├── Personalizer │ └── PersonalizerQuickstart.go ├── QnAMaker │ ├── rest-based-quickstart │ │ └── quickstart.go │ ├── rest │ │ ├── create-kb.go │ │ ├── publish-kb.go │ │ └── query-kb.go │ └── sdk │ │ └── kb_sample.go ├── README.md ├── TextAnalytics │ └── REST │ │ ├── AnalyzeSentiment.go │ │ ├── DetectLanguage.go │ │ ├── ExtractKeyPhrases.go │ │ └── IdentifyEntities.go └── azure_resource_management │ └── create_delete_resource.go ├── java ├── AutoSuggest │ ├── Quickstart.java │ └── pom.xml ├── BingSpellCheck │ ├── README.md │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── BingSpellCheckQuickstart.java ├── ComputerVision │ ├── 4-0 │ │ ├── ImageAnalysisHowTo.java │ │ ├── ImageAnalysisQuickStart.java │ │ ├── pom.xml │ │ └── sample.png │ ├── README.md │ ├── REST │ │ ├── java-analyze.md │ │ ├── java-hand-text.md │ │ ├── java-print-text.md │ │ └── java-thumb.md │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ ├── ComputerVisionQuickstart-single.java │ │ ├── ComputerVisionQuickstart.java │ │ ├── ImageAnalysisQuickstart-single.java │ │ └── ImageAnalysisQuickstart.java ├── ContentModerator │ ├── pom.xml │ ├── src │ │ └── main │ │ │ ├── java │ │ │ └── ContentModeratorQuickstart.java │ │ │ └── resources │ │ │ ├── ImageFiles.txt │ │ │ └── TextModeration.txt │ └── target │ │ └── classes │ │ ├── ImageFiles.txt │ │ └── TextModeration.txt ├── CustomVision │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ └── microsoft │ │ └── azure │ │ └── cognitiveservices │ │ └── vision │ │ └── customvision │ │ └── samples │ │ └── CustomVisionSamples.java ├── Face │ ├── Quickstart.java │ ├── cognsvcsdk │ │ ├── FaceQuickstart.java │ │ ├── README.md │ │ ├── dependencies.txt │ │ ├── faceapi-dependencies.jar │ │ └── ms-azure-cs-faceapi.jar │ ├── pom.xml │ └── rest │ │ └── detect.java ├── FormRecognizer │ ├── FormRecognizer-preview.java │ ├── FormRecognizer.java │ ├── how-to-guide │ │ ├── business-card-model-output.md │ │ ├── general-document-model-output.md │ │ ├── id-document-output.md │ │ ├── invoice-model-output.md │ │ ├── layout-model-output.md │ │ ├── read-model-output.md │ │ ├── receipt-model-output.md │ │ └── 
w2-tax-model-output.md │ ├── v3-java-sdk-general-document-output.md │ ├── v3-java-sdk-layout-output.md │ ├── v3-java-sdk-prebuilt-invoice-output.md │ └── v3-java-sdk-read-output.md ├── LUIS │ ├── authoring │ │ └── quickstart.java │ ├── java-model-with-rest │ │ ├── Model.class │ │ ├── Model.java │ │ └── lib │ │ │ ├── commons-logging-1.2.jar │ │ │ ├── httpclient-4.5.3.jar │ │ │ └── httpcore-4.4.6.jar │ ├── java-predict-with-rest │ │ ├── Predict.class │ │ ├── Predict.java │ │ └── lib │ │ │ ├── commons-logging-1.2.jar │ │ │ ├── httpclient-4.5.3.jar │ │ │ └── httpcore-4.4.6.jar │ └── prediction │ │ └── quickstart.java ├── TextAnalytics │ ├── REST │ │ ├── DetectLanguage.java │ │ ├── GetEntities.java │ │ ├── GetKeyPhrases.java │ │ └── GetSentiment.java │ └── TextAnalyticsSamples.java ├── azure_management_service │ ├── pom.xml │ └── quickstart.java └── qnamaker │ ├── rest │ ├── CreateKB.java │ ├── PublishKB.java │ ├── QueryKB.java │ └── pom.xml │ └── sdk │ ├── pom.xml │ ├── preview-sdk │ └── quickstart.java │ └── quickstart.java ├── javascript ├── AnomalyDetector │ ├── anomaly_detector_quickstart.js │ └── request-data.csv ├── AutoSuggest │ ├── autoSuggest.js │ └── package-lock.json ├── BingLocal │ └── quickstart.js ├── BingSpellCheck │ └── bing_spell_check_quickstart.js ├── ComputerVision │ ├── 4-0 │ │ ├── how-to.js │ │ └── quickstart.js │ ├── ComputerVisionQuickstart-single.js │ ├── ComputerVisionQuickstart.js │ ├── ImageAnalysisQuickstart-single.js │ ├── ImageAnalysisQuickstart.js │ ├── ImageCaptioningQuickstart.js │ ├── REST │ │ ├── javascript-analyze.md │ │ ├── javascript-hand-text.md │ │ ├── javascript-print-text.md │ │ ├── javascript-thumb.md │ │ ├── node-analyze.md │ │ ├── node-print-text.md │ │ └── node-thumb.md │ ├── celebrities.jpg │ ├── package-lock.json │ └── package.json ├── ContentModerator │ ├── content_moderator_quickstart.js │ └── text_file.txt ├── CustomSearch │ └── bing_custom_search_quickstart.js ├── CustomVision │ ├── ImageClassification │ │ └── CustomVisionQuickstart.js │ └── ObjectDetection │ │ └── CustomVisionQuickstart.js ├── Face │ ├── Quickstart.js │ ├── cognsvcsdk │ │ ├── FaceQuickstart.js │ │ ├── sdk_quickstart-single.js │ │ └── sdk_quickstart.js │ └── rest │ │ ├── detect.js │ │ └── js-native-quickstart.md ├── FormRecognizer │ ├── FormRecognizerQuickstart.js │ ├── how-to-guide │ │ ├── business-card-model-output.md │ │ ├── general-document-model-output.md │ │ ├── id-document-output.md │ │ ├── invoice-model-output.md │ │ ├── layout-model-output.md │ │ ├── read-model-output.md │ │ ├── receipt-model-output.md │ │ └── w2-tax-model-output.md │ ├── v3-javascript-sdk-general-document-output.md │ ├── v3-javascript-sdk-layout-output.md │ ├── v3-javascript-sdk-prebuilt-invoice-output.md │ └── v3-javascript-sdk-read-output.md ├── LUIS │ ├── node-model-with-rest │ │ ├── model.js │ │ ├── package-lock.json │ │ └── package.json │ ├── node-predict-with-rest │ │ ├── package-lock.json │ │ ├── package.json │ │ └── predict.js │ ├── node-sdk-authoring-prediction │ │ ├── luis_authoring_quickstart.js │ │ ├── luis_prediction.js │ │ ├── package.json │ │ └── readme.md │ └── sdk-3x │ │ ├── .devcontainer │ │ ├── Dockerfile │ │ └── devcontainer.json │ │ ├── index.js │ │ └── package.json ├── Personalizer │ ├── README.md │ ├── multislot-quickstart │ │ ├── README.md │ │ ├── package.json │ │ └── sample.js │ ├── package.json │ ├── quickstart-sdk │ │ └── personalizer-quickstart.js │ └── sample.js ├── QnAMaker │ ├── rest-based-quickstart │ │ ├── package.json │ │ └── rest-based-quickstart.js │ ├── rest │ │ 
├── create-publish-kb.js │ │ ├── publish-kb.js │ │ └── query-kb.js │ └── sdk │ │ ├── package.json │ │ ├── preview-sdk │ │ └── quickstart.js │ │ ├── qnamaker_quickstart.js │ │ └── readme.md ├── TextAnalytics │ ├── REST │ │ ├── rest-api-analyze-sentiment.js │ │ ├── rest-api-detect-language.js │ │ ├── rest-api-entity-linking.js │ │ └── rest-api-extract-key-phrases.js │ └── text-analytics-v3-client-library.js ├── TranslatorText │ ├── BreakSentence.js │ ├── Detect.js │ ├── DictionaryExamples.js │ ├── DictionaryLookup.js │ ├── Languages.js │ ├── Translate.js │ └── Transliterate.js ├── azure_management_service │ └── create_delete_resource.js ├── speech │ └── speaker-recognition.js └── web │ └── face │ └── rest │ └── detect.html ├── php ├── ComputerVision │ ├── analyze-image │ │ ├── analyze-image.php │ │ └── readme.md │ ├── extract-printed-text │ │ ├── get-printed-text.php │ │ └── readme.md │ ├── generate-thumbnail │ │ ├── get-thumbnail.php │ │ └── readme.md │ └── use-domain-model │ │ ├── readme.md │ │ └── use-domain-model.php ├── TextAnalytics │ └── REST │ │ ├── AnalyzeSentiment.php │ │ ├── DetectLanguage.php │ │ ├── GetKeyPhrases.php │ │ └── IdentifyEntities.php └── face │ └── rest │ └── detect.php ├── powershell └── management.ps1 ├── python ├── AutoSuggest │ └── Quickstart.py ├── BingSpellCheck │ └── bing_spell_check_quickstart.py ├── BingWebSearch │ └── web_search.py ├── ComputerVision │ ├── 4-0 │ │ ├── how-to.py │ │ ├── quickstart.py │ │ └── sample.jpg │ ├── ComputerVisionQuickstart-single.py │ ├── ComputerVisionQuickstart.py │ ├── DetectObjectsTags.py │ ├── ExtractText.py │ ├── ExtractTextPDF.py │ ├── ImageAnalysisQuickstart-single.py │ ├── ImageAnalysisQuickstart.py │ ├── ImageCaptioningQuickstart.py │ └── REST │ │ ├── python-analyze.md │ │ ├── python-disk.md │ │ ├── python-domain.md │ │ ├── python-hand-text.md │ │ ├── python-print-text.md │ │ └── python-thumb.md ├── ContentModerator │ └── ContentModeratorQuickstart.py ├── CustomVision │ ├── ImageClassification │ │ └── CustomVisionQuickstart.py │ └── ObjectDetection │ │ └── CustomVisionQuickstart.py ├── Face │ ├── Quickstart.py │ ├── cognsvcsdk │ │ ├── DetectFaceAttributes.py │ │ ├── DetectIdentifyFace.py │ │ ├── FaceQuickstart-single.py │ │ ├── FaceQuickstart.py │ │ └── README.md │ └── rest │ │ └── detect.py ├── FormRecognizer │ ├── FormRecognizerLogging.py │ ├── FormRecognizerQuickstart-preview.py │ ├── FormRecognizerQuickstart.py │ ├── how-to-find-endpoint-and-key.png │ ├── how-to-guide │ │ ├── business-card-model-output.md │ │ ├── general-document-model-output.md │ │ ├── id-document-output.md │ │ ├── invoice-model-output.md │ │ ├── layout-model-output.md │ │ ├── read-model-output.md │ │ ├── receipt-model-output.md │ │ └── w2-tax-model-output.md │ ├── images │ │ └── how-to-find-endpoint-and-key.png │ ├── quickstart-insuranceCard.ipynb │ ├── quickstart-receipt-analyze.ipynb │ ├── rest │ │ ├── python-business-cards.md │ │ ├── python-invoices.md │ │ ├── python-labeled-data.md │ │ ├── python-layout.md │ │ ├── python-receipts.md │ │ └── python-train-extract.md │ ├── sample-insurance-card.png │ ├── sample-receipt.png │ ├── v3-python-sdk-general-document-output.md │ ├── v3-python-sdk-layout-output.md │ ├── v3-python-sdk-prebuilt-invoice-output.md │ └── v3-python-sdk-read-output.md ├── LUIS │ ├── python-model-with-rest │ │ └── model.py │ ├── python-predict-with-rest │ │ └── predict.py │ ├── python-sdk-authoring-prediction │ │ ├── application_quickstart.py │ │ ├── prediction_quickstart.py │ │ └── readme.md │ ├── readme.md │ └── sdk-3x │ │ ├── 
.devcontainer │ │ ├── Dockerfile │ │ └── devcontainer.json │ │ └── authoring_and_predict.py ├── MetricsAdvisor │ └── metrics_advisor_rest_api.py ├── Multi-Service │ ├── README.md │ ├── autosuggest_cs.py │ ├── computer_vision_cs.py │ ├── content_moderator_cs.py │ ├── entity_search_cs.py │ ├── face_cs.py │ ├── image_text_to_speech.py │ ├── speech_call_center.py │ ├── speech_call_center_continuous.py │ ├── speech_cs.py │ ├── spell_check_cs.py │ ├── text_analytics_cs.py │ └── web_search_cs.py ├── Personalizer │ ├── azure-notebook │ │ ├── .ipynb_checkpoints │ │ │ └── MultislotPersonalizer-checkpoint.ipynb │ │ ├── MultislotPersonalizer.ipynb │ │ ├── README.md │ │ ├── example-rankrequest.json │ │ ├── products.json │ │ ├── simulated_users.json │ │ └── slots.json │ ├── multislot-quickstart │ │ ├── readme.md │ │ └── sample.py │ ├── quickstart-sdk │ │ └── personalizer-quickstart.py │ ├── readme.md │ └── sample.py ├── QnAMaker │ ├── rest │ │ ├── create-kb.py │ │ ├── publish-kb.py │ │ └── query-kb.py │ └── sdk │ │ ├── preview-sdk │ │ └── quickstart.py │ │ └── quickstart.py ├── TextAnalytics │ ├── REST │ │ └── rest-api-text-analytics-examples.py │ └── python-v3-client-library.py └── azure_management_service │ └── create_delete_resource.py ├── rest ├── FormRecognizer │ ├── how-to-guide │ │ ├── business-card-model-output.json │ │ ├── general-document-model-output.json │ │ ├── id-document-model-output.json │ │ ├── invoice-model-output.json │ │ ├── layout-model-output.json │ │ ├── read-model-output.json │ │ ├── receipt-model-output.json │ │ └── w2-tax-model-output.json │ └── ignore │ │ └── ignore.md └── test │ └── test.md └── ruby ├── BingSpellCheck └── quickstart.rb ├── ComputerVision ├── analyze-image │ ├── analyze-image.rb │ └── readme.md ├── extract-printed-text │ ├── get-printed-text.rb │ └── readme.md └── generate-a-thumbnail │ ├── get-thumbnail.rb │ └── readme.md ├── TextAnalytics └── REST │ ├── AnalyzeSentiment.rb │ ├── ExtractKeyPhrases.rb │ ├── RecognizeEntities.rb │ └── detectLanguages.rb ├── face └── rest │ └── detect.rb └── qnamaker └── sdk └── quickstart.rb /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Microsoft Open Source Code of Conduct 2 | 3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 4 | 5 | Resources: 6 | 7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/) 8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) 9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns 10 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 4 | > Please provide us with the following information: 5 | > --------------------------------------------------------------- 6 | 7 | ### This issue is for a: (mark with an `x`) 8 | ``` 9 | - [ ] bug report -> please search issues before submitting 10 | - [ ] feature request 11 | - [ ] documentation issue or request 12 | - [ ] regression (a behavior that used to work and stopped in a new release) 13 | ``` 14 | 15 | ### Minimal steps to reproduce 16 | > 17 | 18 | ### Any log messages given by the failure 19 | > 20 | 21 | ### Expected/desired behavior 22 | > 23 | 24 | ### OS and Version? 25 | > Windows 7, 8 or 10. Linux (which distribution). macOS (Yosemite? El Capitan? 
Sierra?) 26 | 27 | ### Versions 28 | > 29 | 30 | ### Mention any other details that might be useful 31 | 32 | > --------------------------------------------------------------- 33 | > Thanks! We'll be in touch soon. 34 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Purpose 2 | 3 | * ... 4 | 5 | ## Does this introduce a breaking change? 6 | 7 | ``` 8 | [ ] Yes 9 | [ ] No 10 | ``` 11 | 12 | ## Pull Request Type 13 | What kind of change does this Pull Request introduce? 14 | 15 | 16 | ``` 17 | [ ] Bugfix 18 | [ ] Feature 19 | [ ] Code style update (formatting, local variables) 20 | [ ] Refactoring (no functional changes, no api changes) 21 | [ ] Documentation content changes 22 | [ ] Other... Please describe: 23 | ``` 24 | 25 | ## How to Test 26 | * Get the code 27 | 28 | ``` 29 | git clone [repo-address] 30 | cd [repo-name] 31 | git checkout [branch-name] 32 | npm install 33 | ``` 34 | 35 | * Test the code 36 | 37 | ``` 38 | ``` 39 | 40 | ## What to Check 41 | Verify that the following are valid 42 | * ... 43 | 44 | ## Other Information 45 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [project-title] Changelog 2 | 3 | 4 | # x.y.z (yyyy-mm-dd) 5 | 6 | *Features* 7 | * ... 8 | 9 | *Bug Fixes* 10 | * ... 11 | 12 | *Breaking Changes* 13 | * ... 14 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. All rights reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE -------------------------------------------------------------------------------- /curl/QnAMaker/create-knowledge-base.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Call script with following command-line call 4 | # 5 | # `bash create-knowledge-base.sh param-value-1 param-value-2` 6 | # `bash create-knowledge-base.sh my-resource-name 012345` 7 | 8 | # Param $1 is your QnA Maker resource name 9 | # Param $2 is your QnA Maker resource key 10 | # These values are found in the Azure portal - https://portal.azure.com 11 | 12 | # cURL returns 13 | # { 14 | # "operationState": "NotStarted", 15 | # "createdTimestamp": "2020-02-27T04:11:22Z", 16 | # "lastActionTimestamp": "2020-02-27T04:11:22Z", 17 | # "userId": "9596077b3e0441eb93d5080d6a15c64b", 18 | # "operationId": "95a4f700-9899-4c98-bda8-5449af9faef8" 19 | #} 20 | 21 | # In order to get knowledge base id for continuing operations, get status 22 | # using the operationID 23 | 24 | curl https://$1.cognitiveservices.azure.com/qnamaker/v4.0/knowledgebases/create \ 25 | -X POST \ 26 | -H "Ocp-Apim-Subscription-Key: $2" \ 27 | -H "Content-Type:application/json" \ 28 | -H "Content-Size:107" \ 29 | -d '{ name: "QnA Maker FAQ",urls: [ "https://docs.microsoft.com/en-in/azure/cognitive-services/qnamaker/faqs"]}' -------------------------------------------------------------------------------- /curl/QnAMaker/delete-knowledge-base.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Call script with following command-line call 4 | # 5 | # `bash delete-knowledge-base.sh param-value-1 param-value-2 param-value-3` 6 | # `bash delete-knowledge-base.sh my-resource-name 012345 9999999` 7 | 8 | # Param $1 is your QnA Maker resource name 9 | # Param $2 is your QnA Maker resource key 10 | # Param $3 is your knowledge base id 11 | 12 | # -v param to curl requests verbose response which includes the HTTP response 13 | # Response is 204 with no results 14 | 15 | curl https://$1.cognitiveservices.azure.com/qnamaker/v4.0/knowledgebases/$3 \ 16 | -X DELETE \ 17 | -v \ 18 | -H "Ocp-Apim-Subscription-Key: $2" 19 | -------------------------------------------------------------------------------- /curl/QnAMaker/get-answer-from-runtime.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Call script with following command-line call 4 | # 5 | # `bash get-answer-from-runtim.sh param-value-1 param-value-2 param-value-3` 6 | # `bash get-answer-from-runtim.sh my-resource-name 012345 898989889` 7 | 8 | # Param $1 is your QnA Maker resource name 9 | # Param $2 is your QnA Maker endpoint key 10 | # Param $3 is your knowledge base id 11 | 12 | curl https://$1.azurewebsites.net/qnamaker/knowledgebases/$3/generateAnswer \ 13 | -X POST \ 14 | -H "Authorization: EndpointKey $2" \ 15 | -H "Content-Type:application/json" \ 16 | -H "Content-Size:159" \ 17 | -d '{"question": "qna maker and luis","top": 6,"isTest": true, "scoreThreshold": 20, "strictFilters": [], "userId": "sd53lsY="}' -------------------------------------------------------------------------------- /curl/QnAMaker/get-operation-status.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Call script with following command-line call 4 | # 5 | # `bash get-operation-status.sh param-value-1 param-value2 param-value-3` 6 | # `bash get-operation-status.sh my-resource-name 012345 678678678` 7 | 8 | # Param $1 is your QnA Maker resource name 9 | # Param $2 is your QnA Maker resource key 10 | # Param $3 is your operationId, returned from a previous API call such as create or train 11 | 12 | # cURL returns 13 | # { 14 | # "operationState": "Succeeded", 15 | # "createdTimestamp": "2020-02-27T04:54:07Z", 16 | # "lastActionTimestamp": "2020-02-27T04:54:19Z", 17 | # "resourceLocation": "/knowledgebases/fe3971b7-cfaa-41fa-8d9f-6ceb673eb865", 18 | # "userId": "f596077b3e0441eb93d5080d6a15c64b", 19 | # "operationId": "f293f218-d080-48f0-a766-47993e9b26a8" 20 | # } 21 | 22 | 23 | curl https://$1.cognitiveservices.azure.com/qnamaker/v4.0/operations/$3 \ 24 | -X GET \ 25 | -H "Ocp-Apim-Subscription-Key: $2" -------------------------------------------------------------------------------- /curl/QnAMaker/get-query-endpoint-key.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Call script with following command-line call 4 | # Knowledge base must be published first 5 | # 6 | # `bash get-query-endpoint-key.sh param-value-1 param-value2` 7 | # `bash get-query-endpoint-key.sh my-resource-name 012345` 8 | 9 | # Param $1 is your QnA Maker resource name 10 | # Param $2 is your QnA Maker resource key 11 | 12 | # Results are: 13 | # { 14 | # "primaryEndpointKey": "73e88a14-694a-44d5-883b-184a68aa8530", 15 | # "secondaryEndpointKey": "b2c98c16-ca31-4294-8626-6c57454a5063", 16 | # "installedVersion": "4.0.5", 17 | # "lastStableVersion": "4.0.6" 18 | #} 19 | 20 | 21 | curl https://$1.cognitiveservices.azure.com/qnamaker/v4.0/endpointkeys \ 22 | -X GET \ 23 | -H "Ocp-Apim-Subscription-Key: $2" -------------------------------------------------------------------------------- /curl/QnAMaker/publish-knowledge-base.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Call script with following command-line call 4 | # 5 | # `bash create-knowledge-base.sh param-value-1 param-value-2 param-value-3` 6 | # `bash create-knowledge-base.sh my-resource-name 012345 7867868` 7 | 8 | # Param $1 is your QnA Maker resource name 9 | # Param $2 is your QnA Maker resource key 10 | # Param $3 is your knowledge base id 11 | 12 | # -v param to curl requests verbose response which includes the HTTP response 13 | # Response is 204 with no results 14 | 15 | curl https://$1.cognitiveservices.azure.com/qnamaker/v4.0/knowledgebases/$3 \ 16 | -v \ 17 | -X POST \ 18 | -H "Ocp-Apim-Subscription-Key: $2" \ 19 | --data-raw '' -------------------------------------------------------------------------------- /curl/QnAMaker/readme.md: -------------------------------------------------------------------------------- 1 | # Use QnA Maker's REST APIs with Bash scripts 2 | 3 | These bash scripts use [cURL](https://curl.haxx.se/) to request from the QnA Maker service. 
4 | 5 | | Task | Reference documentation| 6 | |--|--| 7 | |[Create knowledge base](create-knowledge-base.sh)|[API](https://docs.microsoft.com/rest/api/cognitiveservices/qnamaker/knowledgebase/create)| 8 | |[Get Operation status](get-operation-status.sh)|[API](https://docs.microsoft.com/rest/api/cognitiveservices/qnamaker/operations/getdetails)| 9 | |[Publish knowledge base](publish-knowledge-base.sh)|[API](https://docs.microsoft.com/rest/api/cognitiveservices/qnamaker/knowledgebase/publish)| 10 | |[Get published knowledge base endpoint key](get-query-endpoint-key.sh)|[API](https://docs.microsoft.com/rest/api/cognitiveservices/qnamaker/endpointkeys/getkeys)| 11 | |[Get answer from published knowledge base](get-answer-from-runtime.sh)|[API](https://docs.microsoft.com/rest/api/cognitiveservices/qnamakerruntime/runtime/generateanswer)| 12 | |[Delete knowledge base](delete-knowledge-base.sh)|[API](https://docs.microsoft.com/rest/api/cognitiveservices/qnamaker/knowledgebase/delete)| 13 | -------------------------------------------------------------------------------- /curl/content-moderator/quickstart.sh: -------------------------------------------------------------------------------- 1 | # 2 | curl -v -X POST "https://westus.api.cognitive.microsoft.com/contentmoderator/moderate/v1.0/ProcessImage/Evaluate?CacheImage={boolean}" 3 | -H "Content-Type: application/json" 4 | -H "Ocp-Apim-Subscription-Key: {subscription key}" 5 | --data-ascii "{\"DataRepresentation\":\"URL\", \"Value\":\"https://moderatorsampleimages.blob.core.windows.net/samples/sample.jpg\"}" 6 | # 7 | 8 | # 9 | curl -v -X POST "https://westus.api.cognitive.microsoft.com/contentmoderator/moderate/v1.0/ProcessText/Screen?autocorrect=True&PII=True&classify=True&language={string}" 10 | -H "Content-Type: text/plain" 11 | -H "Ocp-Apim-Subscription-Key: {subscription key}" 12 | --data-ascii "Is this a crap email abcdef@abcd.com, phone: 6657789887, IP: 255.255.255.255, 1 Microsoft Way, Redmond, WA 98052" 13 | # -------------------------------------------------------------------------------- /curl/custom-vision/image-classifier.sh: -------------------------------------------------------------------------------- 1 | # 2 | curl -v -X POST -H "Training-key: {subscription key}" "https://{endpoint}/customvision/v3.3/Training/projects?name={name}" 3 | # 4 | 5 | # 6 | curl -v -X POST -H "Training-key: {subscription key}" "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/tags?name={name}" 7 | # 8 | 9 | # 10 | curl -v -X POST -H "Content-Type: multipart/form-data" -H "Training-key: {subscription key}" "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/images?tagIds={tagArray}" 11 | --data-ascii "{binary data}" 12 | # 13 | 14 | # 15 | curl -v -X POST -H "Content-Type: application/json" -H "Training-key: {subscription key}" "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/train" 16 | # 17 | 18 | # 19 | curl -v -X POST -H "Training-key: {subscription key}" "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/iterations/{iterationId}/publish?publishName={publishName}&predictionId={predictionId}" 20 | # 21 | 22 | # 23 | curl -v -X POST -H "Content-Type: application/octet-stream" -H "Prediction-key: {subscription key}" "https://{endpoint}/customvision/v3.1/Prediction/{projectId}/classify/iterations/{publishedName}/image" 24 | --data-ascii "{binary data}" 25 | # 26 | 27 | --------------------------------------------------------------------------------
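The commands in image-classifier.sh above are templates with `{placeholder}` values. As a hypothetical illustration of the final prediction call with those placeholders filled in (the endpoint host, project GUID, and iteration name below are made up; substitute your own), note that `--data-binary` sends the image bytes unmodified, whereas `--data-ascii` is intended for text:

```bash
# Hypothetical values: replace with your own Custom Vision resource, project, and published iteration.
ENDPOINT="westus2.api.cognitive.microsoft.com"
PREDICTION_KEY="<your-prediction-key>"
PROJECT_ID="00000000-0000-0000-0000-000000000000"
PUBLISHED_NAME="classifyModel"

# Classify a local image against the published iteration (mirrors the last template above).
curl -v -X POST \
  -H "Content-Type: application/octet-stream" \
  -H "Prediction-key: $PREDICTION_KEY" \
  --data-binary @test-image.jpg \
  "https://$ENDPOINT/customvision/v3.1/Prediction/$PROJECT_ID/classify/iterations/$PUBLISHED_NAME/image"
```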
/curl/custom-vision/object-detector.sh: -------------------------------------------------------------------------------- 1 | # 2 | curl -v -X POST "https://{endpoint}/customvision/v3.3/Training/projects?name={name}&domainId=da2e3a8a-40a5-4171-82f4-58522f70fbc1" 3 | -H "Training-key: {subscription key}" 4 | # 5 | 6 | # 7 | curl -v -X POST "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/tags?name={name}" 8 | -H "Training-key: {subscription key}" 9 | # 10 | 11 | # 12 | curl -v -X POST "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/images?tagIds={tagArray}" 13 | -H "Content-Type: multipart/form-data" 14 | -H "Training-key: {subscription key}" 15 | --data-ascii "{binary data}" 16 | # 17 | 18 | # 19 | curl -v -X POST "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/train" 20 | -H "Content-Type: application/json" 21 | -H "Training-key: {subscription key}" 22 | # 23 | 24 | # 25 | curl -v -X POST "https://{endpoint}/customvision/v3.3/Training/projects/{projectId}/iterations/{iterationId}/publish?publishName={publishName}&predictionId={predictionId}" 26 | -H "Training-key: {subscription key}" 27 | # 28 | 29 | # 30 | curl -v -X POST "https://{endpoint}/customvision/v3.1/Prediction/{projectId}/classify/iterations/{publishedName}/image" 31 | -H "Content-Type: application/octet-stream" 32 | -H "Prediction-key: {subscription key}" 33 | --data-ascii "{binary data}" 34 | # 35 | 36 | -------------------------------------------------------------------------------- /curl/speech/speech-to-text.sh: -------------------------------------------------------------------------------- 1 | # 2 | curl --location --request POST 'https://INSERT_REGION_HERE.stt.speech.microsoft.com/speech/recognition/conversation/cognitiveservices/v1?language=en-US' \ 3 | --header 'Ocp-Apim-Subscription-Key: INSERT_SUBSCRIPTION_KEY_HERE' \ 4 | --header 'Content-Type: audio/wav' \ 5 | --data-binary @'INSERT_AUDIO_FILE_PATH_HERE' 6 | # 7 | 8 | # 9 | { 10 | "RecognitionStatus": "Success", 11 | "DisplayText": "My voice is my passport, verify me.", 12 | "Offset": 6600000, 13 | "Duration": 32100000 14 | } 15 | # -------------------------------------------------------------------------------- /curl/speech/text-to-speech.sh: -------------------------------------------------------------------------------- 1 | curl --location --request POST 'https://INSERT_REGION_HERE.tts.speech.microsoft.com/cognitiveservices/v1' \ 2 | --header 'Ocp-Apim-Subscription-Key: INSERT_SUBSCRIPTION_KEY_HERE' \ 3 | --header 'Content-Type: application/ssml+xml' \ 4 | --header 'X-Microsoft-OutputFormat: audio-16khz-128kbitrate-mono-mp3' \ 5 | --header 'User-Agent: curl' \ 6 | --data-raw ' 7 | 8 | my voice is my passport verify me 9 | 10 | ' > output.mp3 11 | -------------------------------------------------------------------------------- /dotnet/BingAutoSuggest/Program.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Linq; 4 | using System.Text; 5 | using System.Threading.Tasks; 6 | 7 | using Microsoft.Azure.CognitiveServices.Search.AutoSuggest; 8 | 9 | namespace Autosuggest_CS 10 | { 11 | class Program 12 | { 13 | private static readonly string subscription_key = "PASTE_YOUR_AUTO_SUGGEST_SUBSCRIPTION_KEY_HERE"; 14 | private static readonly string endpoint = "PASTE_YOUR_AUTO_SUGGEST_ENDPOINT_HERE"; 15 | 16 | async static Task RunQuickstart() 17 | { 18 | // Generate the credentials and create the client. 
19 | var credentials = new Microsoft.Azure.CognitiveServices.Search.AutoSuggest.ApiKeyServiceClientCredentials(subscription_key); 20 | var client = new AutoSuggestClient(credentials, new System.Net.Http.DelegatingHandler[] { }) 21 | { 22 | Endpoint = endpoint 23 | }; 24 | 25 | var result = await client.AutoSuggestMethodAsync("xb"); 26 | var groups = result.SuggestionGroups; 27 | if (groups.Count > 0) { 28 | var group = groups[0]; 29 | Console.Write("First suggestion group: {0}\n", group.Name); 30 | var suggestions = group.SearchSuggestions; 31 | if (suggestions.Count > 0) 32 | { 33 | Console.WriteLine("First suggestion:"); 34 | Console.WriteLine("Query: {0}", suggestions[0].Query); 35 | Console.WriteLine("Display text: {0}", suggestions[0].DisplayText); 36 | } 37 | else 38 | { 39 | Console.WriteLine("No suggestions found in this group."); 40 | } 41 | } 42 | else 43 | { 44 | Console.WriteLine("No suggestions found."); 45 | } 46 | } 47 | 48 | static void Main(string[] args) 49 | { 50 | Task.WaitAll(RunQuickstart()); 51 | Console.WriteLine("Press any key to exit."); 52 | Console.Read(); 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /dotnet/BingSpellCheck/BingSpellCheckQuickstart.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Threading.Tasks; 3 | using Microsoft.Azure.CognitiveServices.Language.SpellCheck; 4 | using Microsoft.Azure.CognitiveServices.Language.SpellCheck.Models; 5 | 6 | /* 7 | * This Bing Spell Check quickstart takes in some misspelled words and suggests corrections. 8 | * 9 | * Prerequisites: 10 | * - Copy/paste this code into your Program.cs of a new Console App project in Visual Studio. 11 | * - Add your Bing Spell Check subscription key to your environment variables, using BING_SPELL_CHECK_SUBSCRIPTION_KEY as variable names. 
12 | * - Install the NuGet package: Microsoft.Azure.CognitiveServices.Language.SpellCheck 13 | * 14 | * References: 15 | * - Bing Spell Check documentation: https://docs.microsoft.com/en-us/azure/cognitive-services/bing-spell-check/index 16 | * - Dotnet SDK: https://docs.microsoft.com/en-us/dotnet/api/overview/azure/cognitiveservices/client/bingspellcheck?view=azure-dotnet 17 | * - API: https://docs.microsoft.com/en-us/rest/api/cognitiveservices-bingsearch/bing-spell-check-api-v7-reference 18 | */ 19 | 20 | namespace BingSpellCheckQuickstart 21 | { 22 | class Program 23 | { 24 | static void Main(string[] args) 25 | { 26 | string query = "bill Gatas was ehre"; // Bill Gates was here 27 | 28 | // Authenticate 29 | string key = "PASTE_YOUR_SPELL_CHECK_SUBSCRIPTION_KEY_HERE"; 30 | 31 | var client = new SpellCheckClient( 32 | new ApiKeyServiceClientCredentials(key)); 33 | 34 | // Call API to check spelling of query 35 | checkSpelling(client, query).Wait(); 36 | 37 | } 38 | 39 | public static async Task checkSpelling(SpellCheckClient client, string query) 40 | { 41 | var result = await client.SpellCheckerAsync(text: query, mode: "proof"); 42 | 43 | Console.WriteLine("Original query: \n" + query); 44 | Console.WriteLine(); 45 | Console.WriteLine("Misspelled words:"); 46 | foreach (SpellingFlaggedToken token in result.FlaggedTokens) 47 | { 48 | Console.WriteLine(token.Token); 49 | } 50 | 51 | Console.WriteLine(); 52 | Console.WriteLine("Suggested corrections:"); 53 | foreach (SpellingFlaggedToken token in result.FlaggedTokens) 54 | { 55 | foreach(SpellingTokenSuggestion suggestion in token.Suggestions) 56 | { 57 | // Confidence values range from 0 (none) to 1.0 (full confidence) 58 | Console.WriteLine(suggestion.Suggestion + " with confidence " + Math.Round((decimal)suggestion.Score, 2)); 59 | } 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /dotnet/ComputerVision/4-0/image-analysis-how-to/image-analysis-how-to.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | Exe 4 | net8.0 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /dotnet/ComputerVision/4-0/image-analysis-how-to/sample.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/dotnet/ComputerVision/4-0/image-analysis-how-to/sample.jpg -------------------------------------------------------------------------------- /dotnet/ComputerVision/4-0/image-analysis-quickstart/Program.cs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Microsoft Corporation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | // 5 | using Azure; 6 | using Azure.AI.Vision.ImageAnalysis; 7 | using System; 8 | 9 | public class Program 10 | { 11 | static void AnalyzeImage() 12 | { 13 | string endpoint = Environment.GetEnvironmentVariable("VISION_ENDPOINT"); 14 | string key = Environment.GetEnvironmentVariable("VISION_KEY"); 15 | 16 | ImageAnalysisClient client = new ImageAnalysisClient( 17 | new Uri(endpoint), 18 | new AzureKeyCredential(key)); 19 | 20 | ImageAnalysisResult result = client.Analyze( 21 | new Uri("https://learn.microsoft.com/azure/ai-services/computer-vision/media/quickstarts/presentation.png"), 22 | VisualFeatures.Caption | VisualFeatures.Read, 23 | new ImageAnalysisOptions { GenderNeutralCaption = true }); 24 | 25 | Console.WriteLine("Image analysis results:"); 26 | Console.WriteLine(" Caption:"); 27 | Console.WriteLine($" '{result.Caption.Text}', Confidence {result.Caption.Confidence:F4}"); 28 | 29 | Console.WriteLine(" Read:"); 30 | foreach (DetectedTextBlock block in result.Read.Blocks) 31 | foreach (DetectedTextLine line in block.Lines) 32 | { 33 | Console.WriteLine($" Line: '{line.Text}', Bounding Polygon: [{string.Join(" ", line.BoundingPolygon)}]"); 34 | foreach (DetectedTextWord word in line.Words) 35 | { 36 | Console.WriteLine($" Word: '{word.Text}', Confidence {word.Confidence.ToString("#.####")}, Bounding Polygon: [{string.Join(" ", word.BoundingPolygon)}]"); 37 | } 38 | } 39 | } 40 | 41 | static void Main() 42 | { 43 | try 44 | { 45 | AnalyzeImage(); 46 | } 47 | catch (Exception e) 48 | { 49 | Console.WriteLine(e); 50 | } 51 | } 52 | } 53 | // 54 | -------------------------------------------------------------------------------- /dotnet/ComputerVision/4-0/image-analysis-quickstart/image-analysis-quickstart.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | Exe 4 | net8.0 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /dotnet/Face/cognsvcsdk/analyze.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | /* See: 3 | * https://github.com/Microsoft/Cognitive-Samples-VideoFrameAnalysis 4 | * Compile and add reference to VideoFrameAnalyzer.dll. 5 | * Install NuGet package OpenCVSharp. 6 | */ 7 | using VideoFrameAnalyzer; 8 | // Install NuGet package Microsoft.Azure.CognitiveServices.Vision.Face. 9 | using Microsoft.Azure.CognitiveServices.Vision.Face; 10 | using Microsoft.Azure.CognitiveServices.Vision.Face.Models; 11 | using System.Collections.Generic; 12 | using System.Threading.Tasks; 13 | 14 | namespace VideoFrameConsoleApplication 15 | { 16 | class Program 17 | { 18 | static string SUBSCRIPTION_KEY = "PASTE_YOUR_FACE_SUBSCRIPTION_KEY_HERE"; 19 | static string ENDPOINT = "PASTE_YOUR_FACE_ENDPOINT_HERE"; 20 | 21 | static void Main(string[] args) 22 | { 23 | IFaceClient client = new FaceClient(new ApiKeyServiceClientCredentials(SUBSCRIPTION_KEY)) { Endpoint = ENDPOINT }; 24 | 25 | // Define this in Main so it is closed over the client. 26 | async Task Detect(VideoFrame frame) 27 | { 28 | return (DetectedFace[])await client.Face.DetectWithStreamAsync(frame.Image.ToMemoryStream(".jpg"), detectionModel:DetectionModel.Detection03); 29 | } 30 | 31 | // Create grabber, with analysis type Face[]. 32 | FrameGrabber grabber = new FrameGrabber(); 33 | 34 | // Set up our Face API call. 35 | grabber.AnalysisFunction = Detect; 36 | 37 | // Set up a listener for when we receive a new result from an API call. 
38 | grabber.NewResultAvailable += (s, e) => 39 | { 40 | if (e.Analysis != null) 41 | Console.WriteLine("New result received for frame acquired at {0}. {1} faces detected", e.Frame.Metadata.Timestamp, e.Analysis.Length); 42 | }; 43 | 44 | // Tell grabber to call the Face API every 3 seconds. 45 | grabber.TriggerAnalysisOnInterval(TimeSpan.FromMilliseconds(3000)); 46 | 47 | // Start running. 48 | grabber.StartProcessingCameraAsync().Wait(); 49 | 50 | // Wait for keypress to stop 51 | Console.WriteLine("Press any key to stop..."); 52 | Console.ReadKey(); 53 | 54 | // Stop, blocking until done. 55 | grabber.StopProcessingAsync().Wait(); 56 | } 57 | } 58 | } -------------------------------------------------------------------------------- /dotnet/FormRecognizer/how-to-guide/business-card-model-output.md: -------------------------------------------------------------------------------- 1 | Contact Name: 2 | 3 | First Name: 'Avery', with confidence 0.989 4 | 5 | Last Name: 'Smith', with confidence 0.989 6 | 7 | Job Title: 'Senior Researcher', with confidence 0.988 8 | 9 | Department: 'Cloud & Al Department', with confidence 0.973 10 | 11 | Email: 'avery.smith@contoso.com', with confidence 0.988 12 | 13 | Website: 'https://www.contoso.com/', with confidence 0.989 14 | 15 | Mobile phone number: '+10791112345', with confidence 0.987 16 | 17 | Work phone number: '+10209875432', with confidence 0.985 18 | 19 | Fax phone number: '+10207892345', with confidence 0.987 20 | 21 | Company name: 'Contoso', with confidence 0.895 22 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/how-to-guide/id-document-model-output.md: -------------------------------------------------------------------------------- 1 | CountryRegion: 'USA', with confidence 0.995 2 | 3 | Date Of Birth: '3/23/1988 12:00:00 AM +00:00', with confidence 0.99 4 | 5 | Date Of Expiration: '3/23/2026 12:00:00 AM +00:00', with confidence 0.99 6 | 7 | Document Number: '034568', with confidence 0.99 8 | 9 | First Name: 'CHRIS', with confidence 0.989 10 | 11 | Last Name: 'SMITH', with confidence 0.989 12 | 13 | Region: 'West Virginia', with confidence 0.99 14 | 15 | Sex: 'M', with confidence 0.99 16 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/how-to-guide/invoice-model-output.md: -------------------------------------------------------------------------------- 1 | Document 0: 2 | 3 | Vendor Name: 'CONTOSO LTD.', with confidence 0.925 4 | 5 | Customer Name: 'MICROSOFT CORPORATION', with confidence 0.908 6 | 7 | Item: 8 | 9 | Description: 'Consulting Services', with confidence 0.951 10 | 11 | Amount: '$60', with confidence 0.967 12 | 13 | Item: 14 | 15 | Description: 'Document Fee', with confidence 0.951 16 | 17 | Amount: '$30', with confidence 0.967 18 | 19 | Item: 20 | 21 | Description: 'Printing Fee', with confidence 0.951 22 | 23 | Amount: '$10', with confidence 0.967 24 | 25 | Sub Total: '$100', with confidence 0.968 26 | 27 | Total Tax: '$10', with confidence 0.959 28 | 29 | Invoice Total: '$110', with confidence 0.945 30 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/how-to-guide/receipt-model-output.md: -------------------------------------------------------------------------------- 1 | Merchant Name: 'Contoso', with confidence 0.616 2 | 3 | Transaction Date: '6/10/2019 12:00:00 AM +00:00', with confidence 0.989 4 | 5 | Item: 6 | 7 | Description: 'Surface Pro 6', with 
confidence 0.99 8 | 9 | Total Price: '1998', with confidence 0.995 10 | 11 | Item: 12 | 13 | Description: 'Surface Pen', with confidence 0.99 14 | 15 | Total Price: '299.97', with confidence 0.995 16 | 17 | Total: '2516.28', with confidence '0.988' 18 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/how-to-guide/w2-tax-model-output.md: -------------------------------------------------------------------------------- 1 | Document 0: 2 | 3 | AdditionalInfo: 4 | 5 | Amount: '6939.68', with confidence 0 6 | 7 | LetterCode: 'DD', with confidence 0 8 | 9 | AdditionalInfo: 10 | 11 | Amount: '5432', with confidence 0 12 | 13 | LetterCode: 'F', with confidence 0 14 | 15 | AdditionalInfo: 16 | 17 | Amount: '876.3', with confidence 0 18 | 19 | LetterCode: 'D', with confidence 0 20 | 21 | AdditionalInfo: 22 | 23 | Amount: '123.3', with confidence 0 24 | 25 | LetterCode: 'C', with confidence 0 26 | 27 | Allocated Tips: '874.2', with confidence 0.999 28 | 29 | Employer Name: 'CONTOSO LTD', with confidence 0.999 30 | 31 | Employer ID Number: '98-7654321', with confidence 0.999 32 | 33 | Employer Address: '123 MICROSOFT WAY 34 | 35 | REDMOND, WA 98765', with confidence 0.999 36 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/v0.8.0-project/App.config: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/v0.8.0-project/FormRecognizer.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | VisualStudioVersion = 15.0.27703.2035 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FormRecognizer", "FormRecognizer.csproj", "{9A48A4F9-E504-4DAA-B7CB-25623E9EB16A}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {9A48A4F9-E504-4DAA-B7CB-25623E9EB16A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {9A48A4F9-E504-4DAA-B7CB-25623E9EB16A}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {9A48A4F9-E504-4DAA-B7CB-25623E9EB16A}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {9A48A4F9-E504-4DAA-B7CB-25623E9EB16A}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {A20C50F9-8818-4C1F-B83D-A3B2179B45FC} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/v0.8.0-project/Properties/AssemblyInfo.cs: -------------------------------------------------------------------------------- 1 | using System.Reflection; 2 | using System.Runtime.CompilerServices; 3 | using System.Runtime.InteropServices; 4 | 5 | // General Information about an assembly is controlled through the following 6 | // set of attributes. Change these attribute values to modify the information 7 | // associated with an assembly. 
8 | [assembly: AssemblyTitle("FormRecognizer")] 9 | [assembly: AssemblyDescription("")] 10 | [assembly: AssemblyConfiguration("")] 11 | [assembly: AssemblyCompany("")] 12 | [assembly: AssemblyProduct("FormRecognizer")] 13 | [assembly: AssemblyCopyright("Copyright © 2018")] 14 | [assembly: AssemblyTrademark("")] 15 | [assembly: AssemblyCulture("")] 16 | 17 | // Setting ComVisible to false makes the types in this assembly not visible 18 | // to COM components. If you need to access a type in this assembly from 19 | // COM, set the ComVisible attribute to true on that type. 20 | [assembly: ComVisible(false)] 21 | 22 | // The following GUID is for the ID of the typelib if this project is exposed to COM 23 | [assembly: Guid("9a48a4f9-e504-4daa-b7cb-25623e9eb16a")] 24 | 25 | // Version information for an assembly consists of the following four values: 26 | // 27 | // Major Version 28 | // Minor Version 29 | // Build Number 30 | // Revision 31 | // 32 | // You can specify all the values or you can default the Build and Revision Numbers 33 | // by using the '*' as shown below: 34 | // [assembly: AssemblyVersion("1.0.*")] 35 | [assembly: AssemblyVersion("1.0.0.0")] 36 | [assembly: AssemblyFileVersion("1.0.0.0")] 37 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/v0.8.0-project/packages.config: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /dotnet/FormRecognizer/v3-csharp-quickstart-prebuilt-invoice-output.md: -------------------------------------------------------------------------------- 1 | # Quickstart output: C# SDK prebuilt invoice model (beta) 2 | 3 | [Reference documentation](https://docs.microsoft.com/dotnet/api/azure.ai.formrecognizer.documentanalysis?view=azure-dotnet-preview&preserve-view=true) | [Library Source Code](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.FormRecognizer_4.0.0-beta.3/sdk/formrecognizer/Azure.AI.FormRecognizer/) | [Package (NuGet)](https://www.nuget.org/packages/Azure.AI.FormRecognizer/4.0.0-beta.3) | [Samples](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/formrecognizer/Azure.AI.FormRecognizer/samples/README.md) 4 | 5 | You can get started using the Azure Form Recognizer prebuilt model with the [C# programming language quickstart](https://docs.microsoft.com/azure/applied-ai-services/form-recognizer/quickstarts/try-v3-csharp-sdk#prebuilt-model). The prebuilt model analyzes and extracts common fields from specific document types using a prebuilt model. 
Here is the expected outcome from the prebult invoice model quickstart code: 6 | 7 | ## Prebuilt invoice model output 8 | 9 | Document 0: 10 | 11 | Vendor Name: 'CONTOSO LTD.', with confidence 0.962 12 | 13 | Customer Name: 'MICROSOFT CORPORATION', with confidence 0.951 14 | 15 | Item: 16 | 17 |   Description: 'Test for 23 fields', with confidence 0.899 18 | 19 |   Amount: '100', with confidence 0.902 20 | 21 | Sub Total: '100', with confidence 0.979 22 | 23 | Total Tax: '10', with confidence 0.979 24 | 25 | Invoice Total: '110', with confidence 0.973 26 | -------------------------------------------------------------------------------- /dotnet/LanguageUnderstanding/csharp-model-with-rest/csharp-model-with-rest.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | csharp_model_with_rest 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /dotnet/LanguageUnderstanding/csharp-predict-with-rest/csharp-predict-with-rest.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | csharp_predict_with_rest 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /dotnet/LanguageUnderstanding/predict-with-sdk-3x/predict-with-sdk-3x.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp2.1 6 | predict_3x 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dotnet/LanguageUnderstanding/predict-with-sdk-3x/readme.md: -------------------------------------------------------------------------------- 1 | # How to run this sample 2 | 3 | The .Net Core file queries a public Language Understanding (LUIS) app and prints the results to the console. 4 | 5 | 1. Add your own LUIS prediction key (assigned to app in LUIS portal), endpoint URL, and app id: 6 | 1. Run `dotnet build` to build the file. 7 | 1. Run `dotnet run` to run the file. 8 | 9 | The output is: 10 | 11 | ```console 12 | PS C:\Users\sam\cognitive-services-quickstart-code\dotnet\LanguageUnderstanding\predict-with-sdk-3x> dotnet run 13 | Query:'turn on the bedroom light' 14 | TopIntent :'HomeAutomation.TurnOn' 15 | HomeAutomation.TurnOn:0.1548855 16 | None:0.142956868 17 | HomeAutomation.TurnOff:0.0307567716 18 | done 19 | ``` 20 | -------------------------------------------------------------------------------- /dotnet/LanguageUnderstanding/sdk-3x/.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/vscode-remote/devcontainer.json or this file's README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.128.0/containers/dotnetcore 3 | { 4 | "name": "C# (.NET Core)", 5 | "build": { 6 | "dockerfile": "Dockerfile", 7 | "args": { 8 | // Update 'VARIANT' to pick a .NET Core version. Rebuild the container if 9 | // it already exists to update. Example variants: 2.1-bionic, 3.1-bionic 10 | "VARIANT": "3.1-bionic", 11 | // Options 12 | "INSTALL_NODE": "true", 13 | "NODE_VERSION": "lts/*", 14 | "INSTALL_AZURE_CLI": "false", 15 | "UPGRADE_PACKAGES": "false" 16 | } 17 | }, 18 | // Set *default* container specific settings.json values on container create. 
19 | "settings": { 20 | "terminal.integrated.shell.linux": "/bin/bash" 21 | }, 22 | // Add the IDs of extensions you want installed when the container is created. 23 | "extensions": [ 24 | "ms-dotnettools.csharp" 25 | ], 26 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 27 | // "forwardPorts": [5000, 5001], 28 | // [Optional] To reuse of your local HTTPS dev cert, first export it locally using this command: 29 | // * Windows PowerShell: 30 | // dotnet dev-certs https --trust; dotnet dev-certs https -ep "$env:USERPROFILE/.aspnet/https/aspnetapp.pfx" -p "SecurePwdGoesHere" 31 | // * macOS/Linux terminal: 32 | // dotnet dev-certs https --trust; dotnet dev-certs https -ep "${HOME}/.aspnet/https/aspnetapp.pfx" -p "SecurePwdGoesHere" 33 | // 34 | // Next, after running the command above, uncomment lines in the 'mounts' and 'remoteEnv' lines below, 35 | // and open / rebuild the container so the settings take effect. 36 | // 37 | "mounts": [ 38 | // "source=${env:HOME}${env:USERPROFILE}/.aspnet/https,target=/home/vscode/.aspnet/https,type=bind" 39 | ], 40 | "remoteEnv": { 41 | // "ASPNETCORE_Kestrel__Certificates__Default__Password": "SecurePwdGoesHere", 42 | // "ASPNETCORE_Kestrel__Certificates__Default__Path": "/home/vscode/.aspnet/https/aspnetapp.pfx", 43 | } 44 | // Use 'postCreateCommand' to run commands after the container is created. 45 | , 46 | "postCreateCommand": "dotnet restore", 47 | // Uncomment to connect as a non-root user. See https://aka.ms/vscode-remote/containers/non-root. 48 | // "remoteUser": "vscode" 49 | } -------------------------------------------------------------------------------- /dotnet/LanguageUnderstanding/sdk-3x/LUIS_SDK_3x.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /dotnet/Personalizer/multislot-quickstart-v2PreviewSdk/README.md: -------------------------------------------------------------------------------- 1 | # About this Quickstart 2 | 3 | Multi-slot personalization (Preview) allows you to target content in web layouts, carousels, and lists where more than one action (such as a product or piece of content) is shown to your users. With Personalizer multi-slot APIs, you can have the AI models in Personalizer learn what user contexts and products drive certain behaviors, considering and learning from the placement in your user interface. For example, Personalizer may learn that certain products or content drive more clicks as a sidebar or a footer than as a main highlight on a page. 4 | 5 | This sample asks for the time of day and device type to determine which items to display on a retail app/website. You can select if that top choice is what you would pick.. 6 | 7 | # To try this sample 8 | 9 | ## Prerequisites 10 | 11 | The solution is a C# .NET console app project, so you will need [.NET 5.0](https://dotnet.microsoft.com/download/dotnet/5.0), and [Visual Studio 2019](https://visualstudio.microsoft.com/vs/). 12 | 13 | ## Upgrade Persoanlizer instance to multi-slot 14 | 15 | 1. Configure your Personalizer instance for multi-slot (see [Setting up](https://docs.microsoft.com/en-us/azure/cognitive-services/personalizer/how-to-multi-slot?pivots=programming-language-csharp)) 16 | 17 | ## Set up the sample 18 | 19 | - Clone the Azure Personalizer Samples repo. 
20 | 21 | ```bash 22 | git clone https://github.com/Azure-Samples/cognitive-services-quickstart-code.git 23 | ``` 24 | 25 | - Navigate to _dotnet/Personalizer/multislot-quickstart-v2PreviewSdk_. 26 | 27 | 28 | ## Set up Azure Personalizer Service 29 | 30 | - Create a Personalizer instance in the Azure portal. 31 | 32 | - You can find your key and endpoint on the resource's Keys and Endpoint page, under Resource Management. 33 | 34 | 1. Update the PersonalizationBaseUrl value ("") in Program.cs with the endpoint specific to your Personalizer service instance. 35 | 36 | 1. Update the ResourceKey value ("") in Program.cs with the key specific to your Personalizer service instance. 37 | 38 | ## Run the sample 39 | 40 | Build and run the sample by pressing **F5** in Visual Studio, or `dotnet build` then `dotnet run` in the same directory as multislot-quickstart.csproj, if using the .NET Core CLI. The app will take input from the user interactively and send the data to the Personalizer instance. 41 | -------------------------------------------------------------------------------- /dotnet/Personalizer/multislot-quickstart-v2PreviewSdk/multislot-quickstart.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | net5.0 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dotnet/Personalizer/singleslot-quickstart-v1Sdk/README.md: -------------------------------------------------------------------------------- 1 | # About this Quickstart 2 | 3 | This interactive sample takes the time of day and the user's taste preference as context, and sends it to an Azure Personalizer instance, which then returns the top personalized food choice, along with the recommendation probability for each food item. The user then inputs whether or not Personalizer predicted correctly, which is data used to improve Personalizer's prediction model. 4 | 5 | # To try this sample 6 | 7 | ## Prerequisites 8 | 9 | The solution is a C# .NET Core console app project, so you will need [.NET Core 3.1](https://dotnet.microsoft.com/download/dotnet-core/3.1), and [Visual Studio 2019](https://visualstudio.microsoft.com/vs/) or [.NET Core CLI](https://docs.microsoft.com/en-us/dotnet/core/tools/). 10 | 11 | ## Set up the sample 12 | 13 | - Clone the Azure Personalizer Samples repo. 14 | 15 | ```bash 16 | git clone https://github.com/Azure-Samples/cognitive-services-quickstart-code.git 17 | ``` 18 | 19 | - Navigate to _dotnet/Personalizer/singleslot-quickstart-v1Sdk_. 20 | 21 | - Open `PersonalizerExample.sln`, if using Visual Studio. 22 | 23 | ## Set up Azure Personalizer Service 24 | 25 | - Create a Personalizer instance in the Azure portal. 26 | 27 | - Set script variables **ApiKey** and **ServiceEndpoint**. These values can be found on your Cognitive Services resource's Quick start tab in the Azure portal. 28 | 29 | ## Run the sample 30 | 31 | Build and run the sample by pressing **F5** in Visual Studio, or `dotnet build` then `dotnet run` in the same directory as PersonalizerExample.csproj, if using the .NET Core CLI. The app will take input from the user interactively and send the data to the Personalizer instance.
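
## The Rank and Reward loop

Under the hood, each round of this sample is a **Rank** call (ask Personalizer to pick the best food for the current context) followed by a **Reward** call (tell Personalizer how good that pick was). Below is a minimal sketch of that loop with the v1 SDK; it assumes the `Microsoft.Azure.CognitiveServices.Personalizer` NuGet package, and the key, endpoint, actions, and features shown are placeholders rather than the exact values used by this sample.

```csharp
using System;
using System.Collections.Generic;
using Microsoft.Azure.CognitiveServices.Personalizer;
using Microsoft.Azure.CognitiveServices.Personalizer.Models;

class RankAndRewardSketch
{
    // Placeholder values: replace with the key and endpoint of your Personalizer resource.
    private const string ApiKey = "PASTE_YOUR_PERSONALIZER_KEY_HERE";
    private const string ServiceEndpoint = "https://PASTE_YOUR_RESOURCE_NAME.cognitiveservices.azure.com/";

    static void Main()
    {
        var client = new PersonalizerClient(
            new ApiKeyServiceClientCredentials(ApiKey)) { Endpoint = ServiceEndpoint };

        // Actions are the items Personalizer chooses between; each action has its own features.
        IList<RankableAction> actions = new List<RankableAction>
        {
            new RankableAction("pasta", new List<object> { new { taste = "salty", spiceLevel = "medium" } }),
            new RankableAction("salad", new List<object> { new { taste = "salty", spiceLevel = "low" } })
        };

        // Context features describe the current user and session.
        IList<object> context = new List<object> { new { time = "morning", taste = "savory" } };

        // Rank: ask Personalizer which action to show for this context.
        string eventId = Guid.NewGuid().ToString();
        RankResponse response = client.Rank(new RankRequest(actions, context, null, eventId));
        Console.WriteLine($"Personalizer suggests: {response.RewardActionId}");

        // Reward: report how good the suggestion was (1 if the user took it, 0 if not).
        client.Reward(response.EventId, new RewardRequest(1));
    }
}
```

In the actual sample, the reward value comes from the user's console answer about whether the suggested food matched what they would have chosen.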
32 | -------------------------------------------------------------------------------- /dotnet/Personalizer/singleslot-quickstart-v1Sdk/personalizer.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dotnet/Personalizer/singleslot-quickstart-v2PreviewSdk/README.md: -------------------------------------------------------------------------------- 1 | # About this Quickstart 2 | 3 | This interactive sample takes the time of day and the user's taste preference as context, and sends it to an Azure Personalizer instance, which then returns the top personalized food choice, along with the recommendation probability for each food item. The user then inputs whether or not Personalizer predicted correctly, which is data used to improve Personalizer's prediction model. 4 | 5 | # To try this sample 6 | 7 | ## Prerequisites 8 | 9 | The solution is a C# .NET console app project, so you will need [.NET 5.0](https://dotnet.microsoft.com/download/dotnet/5.0) and [Visual Studio 2019](https://visualstudio.microsoft.com/vs/). 10 | 11 | ## Set up the sample 12 | 13 | - Clone the Azure Personalizer Samples repo. 14 | 15 | ```bash 16 | git clone https://github.com/Azure-Samples/cognitive-services-quickstart-code.git 17 | ``` 18 | 19 | - Navigate to _dotnet/Personalizer/singleslot-quickstart-v2PreviewSdk_. 20 | 21 | - Open `PersonalizerExample.sln`, if using Visual Studio. 22 | 23 | ## Set up Azure Personalizer Service 24 | 25 | - Create a Personalizer instance in the Azure portal. 26 | 27 | - Set script variables **ApiKey** and **ServiceEndpoint**. These values can be found on your Cognitive Services resource's Quick start tab in the Azure portal. 28 | 29 | ## Run the sample 30 | 31 | Build and run the sample by pressing **F5** in Visual Studio, or `dotnet build` then `dotnet run` in the same directory as PersonalizerExample.csproj, if using the .NET Core CLI. The app will take input from the user interactively and send the data to the Personalizer instance.
32 | -------------------------------------------------------------------------------- /dotnet/Personalizer/singleslot-quickstart-v2PreviewSdk/personalizer.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | net5.0 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dotnet/QnAMaker/Preview-sdk-based-quickstart/console.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dotnet/QnAMaker/SDK-based-quickstart/console.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dotnet/QnAMaker/rest-based-quickstart/rest-based-quickstart.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp2.1 6 | rest_based_quickstart 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /dotnet/QnAMaker/rest/publish-kb.cs: -------------------------------------------------------------------------------- 1 | // 2 | using System; 3 | using System.Net.Http; 4 | // 5 | 6 | namespace QnAMakerPublishQuickstart 7 | { 8 | class Program 9 | { 10 | // 11 | private static readonly string subscriptionKey = "PASTE_YOUR_QNA_MAKER_AUTHORING_SUBSCRIPTION_KEY_HERE"; 12 | private static readonly string endpoint = "PASTE_YOUR_QNA_MAKER_AUTHORING_ENDPOINT_HERE"; 13 | private static readonly string kbId = "PASTE_YOUR_QNA_MAKER_KB_ID_HERE"; 14 | // 15 | 16 | static void Main(string[] args) 17 | { 18 | // 19 | var uri = endpoint + "/qnamaker/v4.0/knowledgebases/" + kbId; 20 | 21 | using (var client = new HttpClient()) 22 | using (var request = new HttpRequestMessage()) 23 | { 24 | request.Method = HttpMethod.Post; 25 | request.RequestUri = new Uri(uri); 26 | request.Headers.Add("Ocp-Apim-Subscription-Key", subscriptionKey); 27 | 28 | // Send request to Azure service, get response 29 | // returns 204 with no content 30 | var response = client.SendAsync(request).Result; 31 | 32 | Console.WriteLine("KB published successfully? 
" + response.IsSuccessStatusCode); 33 | 34 | Console.WriteLine("Press any key to continue."); 35 | Console.ReadKey(); 36 | } 37 | // 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /dotnet/QnAMaker/sdk/ConsoleApp1.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp3.1 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /go/AnomalyDetector/request-data.csv: -------------------------------------------------------------------------------- 1 | 2018-03-01T00:00:00Z,32858923 2 | 2018-03-02T00:00:00Z,29615278 3 | 2018-03-03T00:00:00Z,22839355 4 | 2018-03-04T00:00:00Z,25948736 5 | 2018-03-05T00:00:00Z,34139159 6 | 2018-03-06T00:00:00Z,33843985 7 | 2018-03-07T00:00:00Z,33637661 8 | 2018-03-08T00:00:00Z,32627350 9 | 2018-03-09T00:00:00Z,29881076 10 | 2018-03-10T00:00:00Z,22681575 11 | 2018-03-11T00:00:00Z,24629393 12 | 2018-03-12T00:00:00Z,34010679 13 | 2018-03-13T00:00:00Z,33893888 14 | 2018-03-14T00:00:00Z,33760076 15 | 2018-03-15T00:00:00Z,33093515 16 | 2018-03-16T00:00:00Z,29945555 17 | 2018-03-17T00:00:00Z,22676212 18 | 2018-03-18T00:00:00Z,25262514 19 | 2018-03-19T00:00:00Z,33631649 20 | 2018-03-20T00:00:00Z,34468310 21 | 2018-03-21T00:00:00Z,34212281 22 | 2018-03-22T00:00:00Z,38144434 23 | 2018-03-23T00:00:00Z,34662949 24 | 2018-03-24T00:00:00Z,24623684 25 | 2018-03-25T00:00:00Z,26530491 26 | 2018-03-26T00:00:00Z,35445003 27 | 2018-03-27T00:00:00Z,34250789 28 | 2018-03-28T00:00:00Z,33423012 29 | 2018-03-29T00:00:00Z,30744783 30 | 2018-03-30T00:00:00Z,25825128 31 | 2018-03-31T00:00:00Z,21244209 32 | 2018-04-01T00:00:00Z,22576956 33 | 2018-04-02T00:00:00Z,31957221 34 | 2018-04-03T00:00:00Z,33841228 35 | 2018-04-04T00:00:00Z,33554483 36 | 2018-04-05T00:00:00Z,32383350 37 | 2018-04-06T00:00:00Z,29494850 38 | 2018-04-07T00:00:00Z,22815534 39 | 2018-04-08T00:00:00Z,25557267 40 | 2018-04-09T00:00:00Z,34858252 41 | 2018-04-10T00:00:00Z,34750597 42 | 2018-04-11T00:00:00Z,34717956 43 | 2018-04-12T00:00:00Z,34132534 44 | 2018-04-13T00:00:00Z,30762236 45 | 2018-04-14T00:00:00Z,22504059 46 | 2018-04-15T00:00:00Z,26149060 47 | 2018-04-16T00:00:00Z,35250105 -------------------------------------------------------------------------------- /go/BingAutoSuggest/BingAutoSuggestQuickstart.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "github.com/Azure/azure-sdk-for-go/services/cognitiveservices/v1.0/autosuggest" 7 | "github.com/Azure/go-autorest/autorest" 8 | "log" 9 | "os" 10 | ) 11 | 12 | func main() { 13 | var subscription_key string = "PASTE_YOUR_AUTO_SUGGEST_SUBSCRIPTION_KEY_HERE" 14 | var endpoint string = "PASTE_YOUR_AUTO_SUGGEST_ENDPOINT_HERE" 15 | 16 | // Get the context, which is required by the SDK methods. 17 | ctx := context.Background() 18 | 19 | client := autosuggest.New() 20 | // Set the subscription key on the client. 21 | client.Authorizer = autorest.NewCognitiveServicesAuthorizer(subscription_key) 22 | client.Endpoint = endpoint 23 | 24 | // This should return the query suggestion "xbox." 
25 | result, err := client.AutoSuggest (ctx, "xb", "", "", "", "", "", "", "", "", "", "", []autosuggest.ResponseFormat{"Json"}) 26 | if nil != err { 27 | log.Fatal(err) 28 | } 29 | 30 | groups := *result.SuggestionGroups 31 | if len(groups) > 0 { 32 | group, _ := groups[0].AsSuggestionsSuggestionGroup() 33 | fmt.Printf ("First suggestion group: %s\n", (*group).Name) 34 | suggestions := *(*group).SearchSuggestions 35 | if len(suggestions) > 0 { 36 | fmt.Println("First suggestion:") 37 | fmt.Printf("Query: %s\n", *suggestions[0].Query) 38 | fmt.Printf("Display text: %s\n", *suggestions[0].DisplayText) 39 | } else { 40 | fmt.Println("No suggestions found in this group.") 41 | } 42 | } else { 43 | fmt.Println("No suggestions found.") 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /go/BingCustomSearch/quickstart.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "github.com/Azure/azure-sdk-for-go/services/cognitiveservices/v1.0/customsearch" 7 | "github.com/Azure/go-autorest/autorest" 8 | "log" 9 | "os" 10 | ) 11 | 12 | func main() { 13 | var subscription_key string = "PASTE_YOUR_CUSTOM_SEARCH_SUBSCRIPTION_KEY_HERE" 14 | var search_instance_id string = "PASTE_YOUR_CUSTOM_SEARCH_INSTANCE_ID_HERE" 15 | 16 | // Get the context, which is required by the SDK methods. 17 | ctx := context.Background() 18 | 19 | client := customsearch.NewCustomInstanceClient() 20 | // Set the subscription key on the client. 21 | client.Authorizer = autorest.NewCognitiveServicesAuthorizer(subscription_key) 22 | 23 | result, err := client.Search (ctx, search_instance_id, "xbox", "", "", "", "", "", "", nil, "", nil, "", "", nil, "") 24 | if nil != err { 25 | log.Fatal(err) 26 | } 27 | 28 | var web_pages = *result.WebPages 29 | fmt.Printf("Estimated total results: %d.\n", *web_pages.TotalEstimatedMatches) 30 | var results = *web_pages.Value 31 | if len(results) > 0 { 32 | fmt.Println("First 10 results follow.\n") 33 | for i := 0; i < 10 && i < len(results); i++ { 34 | fmt.Println("Title: " + *results[i].Name) 35 | fmt.Println("URL: " + *results[i].URL) 36 | fmt.Println() 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /go/ContentModerator/content_moderator_text_moderation.txt: -------------------------------------------------------------------------------- 1 | "Is this a grabage email abcdef@abcd.com, phone: 6657789887, IP: 255.255.255.255, 1 Microsoft Way, Redmond, WA 98052. Crap is the profanity here. Is this information PII? phone 3144444444" 2 | -------------------------------------------------------------------------------- /go/Face/rest/detect.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "crypto/tls" 5 | "encoding/json" 6 | "fmt" 7 | "io/ioutil" 8 | "net/http" 9 | "os" 10 | "strings" 11 | "time" 12 | ) 13 | 14 | func main() { 15 | subscriptionKey := "PASTE_YOUR_FACE_SUBSCRIPTION_KEY_HERE" 16 | endpoint := "PASTE_YOUR_FACE_ENDPOINT_HERE" 17 | 18 | const imageUrl = 19 | "https://upload.wikimedia.org/wikipedia/commons/c/c3/RH_Louise_Lillian_Gish.jpg" 20 | 21 | const params = "?detectionModel=detection_03" 22 | uri := endpoint + "/face/v1.0/detect" + params 23 | const imageUrlEnc = "{\"url\":\"" + imageUrl + "\"}" 24 | 25 | reader := strings.NewReader(imageUrlEnc) 26 | 27 | //Configure TLS, etc. 
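// Note: InsecureSkipVerify disables TLS certificate validation. That keeps this
// sample simple, but it should not be used in production code.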
28 | tr := &http.Transport{ 29 | TLSClientConfig: &tls.Config{ 30 | InsecureSkipVerify: true, 31 | }, 32 | } 33 | 34 | // Create the Http client 35 | client := &http.Client{ 36 | Transport: tr, 37 | Timeout: time.Second * 2, 38 | } 39 | 40 | // Create the Post request, passing the image URL in the request body 41 | req, err := http.NewRequest("POST", uri, reader) 42 | if err != nil { 43 | panic(err) 44 | } 45 | 46 | // Add headers 47 | req.Header.Add("Content-Type", "application/json") 48 | req.Header.Add("Ocp-Apim-Subscription-Key", subscriptionKey) 49 | 50 | // Send the request and retrieve the response 51 | resp, err := client.Do(req) 52 | if err != nil { 53 | panic(err) 54 | } 55 | 56 | defer resp.Body.Close() 57 | 58 | // Read the response body. 59 | // Note, data is a byte array 60 | data, err := ioutil.ReadAll(resp.Body) 61 | if err != nil { 62 | panic(err) 63 | } 64 | 65 | // Parse the Json data 66 | var f interface{} 67 | json.Unmarshal(data, &f) 68 | 69 | // Format and display the Json result 70 | jsonFormatted, _ := json.MarshalIndent(f, "", " ") 71 | fmt.Println(string(jsonFormatted)) 72 | } 73 | -------------------------------------------------------------------------------- /go/LUIS/go-rest-predict/predict.go: -------------------------------------------------------------------------------- 1 | // 2 | // This quickstart shows how to predict the intent of an utterance by using the LUIS REST APIs. 3 | // 4 | 5 | package main 6 | 7 | // Import dependencies. 8 | import ( 9 | "fmt" 10 | "net/http" 11 | "net/url" 12 | "io/ioutil" 13 | "log" 14 | ) 15 | 16 | func main() { 17 | 18 | ////////// 19 | // Values to modify. 20 | 21 | // YOUR-APP-ID: The App ID GUID found on the www.luis.ai Application Settings page. 22 | var appID = "PASTE_YOUR_LUIS_APP_ID_HERE" 23 | 24 | // YOUR-PREDICTION-KEY: Your LUIS authoring key, 32 character value. 25 | var predictionKey = "PASTE_YOUR_LUIS_PREDICTION_SUBSCRIPTION_KEY_HERE" 26 | 27 | // YOUR-PREDICTION-ENDPOINT: Replace with your authoring key endpoint. 28 | // For example, "https://westus.api.cognitive.microsoft.com/" 29 | var predictionEndpoint = "PASTE_YOUR_LUIS_PREDICTION_ENDPOINT_HERE" 30 | 31 | // utterance for public app 32 | var utterance = "I want two large pepperoni pizzas on thin crust please" 33 | ////////// 34 | 35 | // Call the prediction endpoint. 36 | endpointPrediction(appID, predictionKey, predictionEndpoint, utterance) 37 | } 38 | 39 | // Calls the prediction endpoint and displays the prediction results on the console. 
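// The raw JSON response includes the original query, the top-scoring intent, a score for
// every intent in the app (because verbose=true and show-all-intents=true are set in the
// request URL below), and any recognized entities.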
40 | func endpointPrediction(appID string, predictionKey string, predictionEndpoint string, utterance string) { 41 | 42 | var endpointUrl = fmt.Sprintf("%sluis/prediction/v3.0/apps/%s/slots/production/predict?subscription-key=%s&verbose=true&show-all-intents=true&query=%s", predictionEndpoint, appID, predictionKey, url.QueryEscape(utterance)) 43 | 44 | response, err := http.Get(endpointUrl) 45 | 46 | if err != nil { 47 | // handle error 48 | fmt.Println("error from Get") 49 | log.Fatal(err) 50 | } 51 | 52 | response2, err2 := ioutil.ReadAll(response.Body) 53 | 54 | if err2 != nil { 55 | // handle error 56 | fmt.Println("error from ReadAll") 57 | log.Fatal(err2) 58 | } 59 | 60 | fmt.Println("response") 61 | fmt.Println(string(response2)) 62 | } -------------------------------------------------------------------------------- /go/QnAMaker/rest/publish-kb.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | // 4 | import ( 5 | "bytes" 6 | "fmt" 7 | "net/http" 8 | ) 9 | // 10 | 11 | //
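// main publishes an existing knowledge base by sending a POST request with an empty body
// to the authoring endpoint's /qnamaker/v4.0/knowledgebases/{kbId} route.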
12 | func main() { 13 | /* 14 | * Set the `subscription_key` and `endpoint` variables to your 15 | * QnA Maker authoring subscription key and endpoint. 16 | * 17 | * These values can be found in the Azure portal (ms.portal.azure.com/). 18 | * Look up your QnA Maker resource. Then, in the "Resource management" 19 | * section, find the "Keys and Endpoint" page. 20 | * 21 | * The value of `endpoint` has the format https://YOUR-RESOURCE-NAME.cognitiveservices.azure.com. 22 | * 23 | * Set the `kb_id` variable to the ID of a knowledge base you have 24 | * previously created. 25 | */ 26 | var subscription_key string = "PASTE_YOUR_QNA_MAKER_AUTHORING_SUBSCRIPTION_KEY_HERE" 27 | var endpoint string = "PASTE_YOUR_QNA_MAKER_AUTHORING_ENDPOINT_HERE" 28 | var kb_id string = "PASTE_YOUR_QNA_MAKER_KB_ID_HERE" 29 | 30 | var service string = "/qnamaker/v4.0" 31 | var method string = "/knowledgebases/" 32 | var uri = endpoint + service + method + kb_id 33 | 34 | var content = bytes.NewBuffer([]byte(nil)); 35 | 36 | req, _ := http.NewRequest("POST", uri, content) 37 | 38 | req.Header.Add("Ocp-Apim-Subscription-Key", subscription_key) 39 | 40 | client := &http.Client{} 41 | response, err := client.Do(req) 42 | if err != nil { 43 | panic(err) 44 | } 45 | // print 204 - success code 46 | fmt.Println(response.StatusCode) 47 | } 48 | //
49 | -------------------------------------------------------------------------------- /go/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/go/README.md -------------------------------------------------------------------------------- /go/TextAnalytics/REST/AnalyzeSentiment.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "io/ioutil" 7 | "log" 8 | "net/http" 9 | "os" 10 | "strings" 11 | "time" 12 | ) 13 | 14 | func main() { 15 | var subscriptionKey string = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 16 | var endpoint string = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 17 | 18 | const uriPath = "/text/analytics/v3.0/sentiment" 19 | 20 | var uri = endpoint + uriPath 21 | 22 | data := []map[string]string{ 23 | {"id": "1", "language": "en", "text": "I really enjoy the new XBox One S. It has a clean look, it has 4K/HDR resolution and it is affordable."}, 24 | {"id": "2", "language": "es", "text": "Este ha sido un dia terrible, llegué tarde al trabajo debido a un accidente automobilistico."}, 25 | } 26 | 27 | documents, err := json.Marshal(&data) 28 | if err != nil { 29 | fmt.Printf("Error marshaling data: %v\n", err) 30 | return 31 | } 32 | 33 | r := strings.NewReader("{\"documents\": " + string(documents) + "}") 34 | 35 | client := &http.Client{ 36 | Timeout: time.Second * 2, 37 | } 38 | 39 | req, err := http.NewRequest("POST", uri, r) 40 | if err != nil { 41 | fmt.Printf("Error creating request: %v\n", err) 42 | return 43 | } 44 | 45 | req.Header.Add("Content-Type", "application/json") 46 | req.Header.Add("Ocp-Apim-Subscription-Key", subscriptionKey) 47 | 48 | resp, err := client.Do(req) 49 | if err != nil { 50 | fmt.Printf("Error on request: %v\n", err) 51 | return 52 | } 53 | defer resp.Body.Close() 54 | 55 | body, err := ioutil.ReadAll(resp.Body) 56 | if err != nil { 57 | fmt.Printf("Error reading response body: %v\n", err) 58 | return 59 | } 60 | 61 | var f interface{} 62 | json.Unmarshal(body, &f) 63 | 64 | jsonFormatted, err := json.MarshalIndent(f, "", " ") 65 | if err != nil { 66 | fmt.Printf("Error producing JSON: %v\n", err) 67 | return 68 | } 69 | fmt.Println(string(jsonFormatted)) 70 | } -------------------------------------------------------------------------------- /go/TextAnalytics/REST/DetectLanguage.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "io/ioutil" 7 | "log" 8 | "net/http" 9 | "os" 10 | "strings" 11 | "time" 12 | ) 13 | 14 | func main() { 15 | 16 | var subscriptionKey string = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 17 | var endpoint string = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 18 | const uriPath = "/text/analytics/v3.0/languages" 19 | 20 | var uri = endpoint + uriPath 21 | 22 | data := []map[string]string{ 23 | {"id": "1", "text": "This is a document written in English."}, 24 | {"id": "2", "text": "Este es un document escrito en Español."}, 25 | {"id": "3", "text": "这是一个用中文写的文件"}, 26 | } 27 | 28 | documents, err := json.Marshal(&data) 29 | if err != nil { 30 | fmt.Printf("Error marshaling data: %v\n", err) 31 | return 32 | } 33 | 34 | r := strings.NewReader("{\"documents\": " + string(documents) + "}") 35 | 36 | client := &http.Client{ 37 | Timeout: time.Second * 2, 38 | } 39 
| 40 | req, err := http.NewRequest("POST", uri, r) 41 | if err != nil { 42 | fmt.Printf("Error creating request: %v\n", err) 43 | return 44 | } 45 | 46 | req.Header.Add("Content-Type", "application/json") 47 | req.Header.Add("Ocp-Apim-Subscription-Key", subscriptionKey) 48 | 49 | resp, err := client.Do(req) 50 | if err != nil { 51 | fmt.Printf("Error on request: %v\n", err) 52 | return 53 | } 54 | defer resp.Body.Close() 55 | 56 | body, err := ioutil.ReadAll(resp.Body) 57 | if err != nil { 58 | fmt.Printf("Error reading response body: %v\n", err) 59 | return 60 | } 61 | 62 | var f interface{} 63 | json.Unmarshal(body, &f) 64 | 65 | jsonFormatted, err := json.MarshalIndent(f, "", " ") 66 | if err != nil { 67 | fmt.Printf("Error producing JSON: %v\n", err) 68 | return 69 | } 70 | fmt.Println(string(jsonFormatted)) 71 | } -------------------------------------------------------------------------------- /go/TextAnalytics/REST/ExtractKeyPhrases.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "io/ioutil" 7 | "log" 8 | "net/http" 9 | "os" 10 | "strings" 11 | "time" 12 | ) 13 | 14 | func main() { 15 | var subscriptionKey string = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 16 | var endpoint string = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 17 | 18 | const uriPath = "/text/analytics/v3.0/keyPhrases" 19 | 20 | var uri = endpoint + uriPath 21 | 22 | data := []map[string]string{ 23 | {"id": "1", "language": "en", "text": "I really enjoy the new XBox One S. It has a clean look, it has 4K/HDR resolution and it is affordable."}, 24 | {"id": "2", "language": "es", "text": "Si usted quiere comunicarse con Carlos, usted debe de llamarlo a su telefono movil. Carlos es muy responsable, pero necesita recibir una notificacion si hay algun problema."}, 25 | {"id": "3", "language": "en", "text": "The Grand Hotel is a new hotel in the center of Seattle. 
It earned 5 stars in my review, and has the classiest decor I've ever seen."}, 26 | } 27 | 28 | documents, err := json.Marshal(&data) 29 | if err != nil { 30 | fmt.Printf("Error marshaling data: %v\n", err) 31 | return 32 | } 33 | 34 | r := strings.NewReader("{\"documents\": " + string(documents) + "}") 35 | 36 | client := &http.Client{ 37 | Timeout: time.Second * 2, 38 | } 39 | 40 | req, err := http.NewRequest("POST", uri, r) 41 | if err != nil { 42 | fmt.Printf("Error creating request: %v\n", err) 43 | return 44 | } 45 | 46 | req.Header.Add("Content-Type", "application/json") 47 | req.Header.Add("Ocp-Apim-Subscription-Key", subscriptionKey) 48 | 49 | resp, err := client.Do(req) 50 | if err != nil { 51 | fmt.Printf("Error on request: %v\n", err) 52 | return 53 | } 54 | defer resp.Body.Close() 55 | 56 | body, err := ioutil.ReadAll(resp.Body) 57 | if err != nil { 58 | fmt.Printf("Error reading response body: %v\n", err) 59 | return 60 | } 61 | 62 | var f interface{} 63 | json.Unmarshal(body, &f) 64 | 65 | jsonFormatted, err := json.MarshalIndent(f, "", " ") 66 | if err != nil { 67 | fmt.Printf("Error producing JSON: %v\n", err) 68 | return 69 | } 70 | fmt.Println(string(jsonFormatted)) 71 | } -------------------------------------------------------------------------------- /go/TextAnalytics/REST/IdentifyEntities.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "io/ioutil" 7 | "log" 8 | "net/http" 9 | "os" 10 | "strings" 11 | "time" 12 | ) 13 | 14 | func main() { 15 | 16 | var subscriptionKey string = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 17 | var endpoint string = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 18 | 19 | const uriPath = "/text/analytics/v3.0/entities/recognition/general" 20 | 21 | var uri = endpoint + uriPath 22 | 23 | data := []map[string]string{ 24 | {"id": "1", "language": "en", "text": "Microsoft is an It company."}, 25 | } 26 | 27 | documents, err := json.Marshal(&data) 28 | if err != nil { 29 | fmt.Printf("Error marshaling data: %v\n", err) 30 | return 31 | } 32 | 33 | r := strings.NewReader("{\"documents\": " + string(documents) + "}") 34 | 35 | client := &http.Client{ 36 | Timeout: time.Second * 2, 37 | } 38 | 39 | req, err := http.NewRequest("POST", uri, r) 40 | if err != nil { 41 | fmt.Printf("Error creating request: %v\n", err) 42 | return 43 | } 44 | 45 | req.Header.Add("Content-Type", "application/json") 46 | req.Header.Add("Ocp-Apim-Subscription-Key", subscriptionKey) 47 | 48 | resp, err := client.Do(req) 49 | if err != nil { 50 | fmt.Printf("Error on request: %v\n", err) 51 | return 52 | } 53 | defer resp.Body.Close() 54 | 55 | body, err := ioutil.ReadAll(resp.Body) 56 | if err != nil { 57 | fmt.Printf("Error reading response body: %v\n", err) 58 | return 59 | } 60 | 61 | var f interface{} 62 | json.Unmarshal(body, &f) 63 | 64 | jsonFormatted, err := json.MarshalIndent(f, "", " ") 65 | if err != nil { 66 | fmt.Printf("Error producing JSON: %v\n", err) 67 | return 68 | } 69 | fmt.Println(string(jsonFormatted)) 70 | } -------------------------------------------------------------------------------- /java/AutoSuggest/Quickstart.java: -------------------------------------------------------------------------------- 1 | import com.microsoft.azure.cognitiveservices.search.autosuggest.*; 2 | import com.microsoft.azure.cognitiveservices.search.autosuggest.models.*; 3 | 4 | import java.io.*; 5 | import java.lang.Object.*; 6 | import java.util.*; 7 | import 
java.net.*; 8 | 9 | /** 10 | * This Azure Cognitive Services Bing Autosuggest API quickstart shows how to 11 | * get search suggestions for a given string query. 12 | * 13 | * Download all Maven dependencies from command line into your project folder: 14 | * mvn clean dependency:copy-dependencies 15 | * 16 | * To compile and run, enter the following at a command prompt: 17 | * javac Quickstart.java -cp .;lib\* 18 | * java -cp .;lib\* Quickstart 19 | * This presumes your libraries are stored in a folder named "lib" in your project 20 | * folder. If not, please adjust the -classpath (-cp) value accordingly. 21 | */ 22 | 23 | public class Quickstart { 24 | private static String subscription_key = "PASTE_YOUR_AUTO_SUGGEST_SUBSCRIPTION_KEY_HERE"; 25 | 26 | BingAutoSuggestSearchAPI client = BingAutoSuggestSearchManager.authenticate(subscription_key); 27 | 28 | public void get_suggestions() { 29 | Suggestions suggestions = client.bingAutoSuggestSearch().autoSuggest().withQuery("sail").execute(); 30 | if (suggestions != null && suggestions.suggestionGroups() != null && suggestions.suggestionGroups().size() > 0) { 31 | SuggestionsSuggestionGroup group = suggestions.suggestionGroups().get(0); 32 | System.out.println("First suggestion group: " + group.name()); 33 | System.out.println("Suggestions:"); 34 | for (SearchAction suggestion: group.searchSuggestions()) { 35 | System.out.println("Query: " + suggestion.query()); 36 | System.out.println("Text: " + suggestion.displayText()); 37 | System.out.println("URL: " + suggestion.url()); 38 | System.out.println("Kind: " + suggestion.searchKind()); 39 | System.out.println(); 40 | } 41 | } else { 42 | System.out.println("No suggestions found."); 43 | } 44 | } 45 | 46 | public static void main(String[] args) { 47 | try { 48 | Quickstart quickstart = new Quickstart(); 49 | quickstart.get_suggestions(); 50 | } catch (Exception e) { 51 | System.out.println(e.getMessage()); 52 | e.printStackTrace(); 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /java/AutoSuggest/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 10 | 4.0.0 11 | 12 | com.foo 13 | bar 14 | 0.0.1-SNAPSHOT 15 | 16 | 17 | 18 | 19 | 20 | com.microsoft.azure.cognitiveservices 21 | azure-cognitiveservices-autosuggest 22 | 1.0.2-beta 23 | 24 | 25 | 30 | 31 | org.slf4j 32 | slf4j-jdk14 33 | 1.7.25 34 | 35 | 36 | 37 | 38 | 39 | lib 40 | 41 | 42 | org.codehaus.mojo 43 | exec-maven-plugin 44 | 1.4.0 45 | 46 | Quickstart 47 | 48 | 49 | 50 | org.apache.maven.plugins 51 | maven-dependency-plugin 52 | 53 | 54 | ${project.build.directory} 55 | 56 | 57 | 58 | 59 | org.apache.maven.plugins 60 | maven-compiler-plugin 61 | 3.1 62 | 63 | 13 64 | 13 65 | 66 | 67 | 68 | 69 | 70 | 71 | -------------------------------------------------------------------------------- /java/BingSpellCheck/README.md: -------------------------------------------------------------------------------- 1 | --- 2 | services: cognitive-services, bing-spell-check 3 | platforms: java 4 | author: wiazur 5 | --- 6 | 7 | # Bing Spell Check Quickstart 8 | 9 | This quickstart checks the spelling for the query "Bill Gatas" (with market and mode settings) and print outs the flagged tokens and spelling correction suggestions. 10 | 11 | ## Getting Started 12 | 13 | ### Prerequisites 14 | - A Bing Spell Check subscription key. 
If you don't have one, you can visit [the Microsoft Cognitive Services Web site](https://azure.microsoft.com/free/cognitive-services/), create a new Azure account, and try Cognitive Services for free. 15 | - Set an environment variable named BING_SPELL_CHECK_SUBSCRIPTION_KEY with your Bing Spell Check subscription key in the quickstart. 16 | 17 | ### Clone and run 18 | 19 | Execute the following from a command line: 20 | 21 | 1. `git clone https://github.com/Azure-Samples/cognitive-services-quickstart-code.git` 22 | 1. `cd cognitive-services-quickstart-code/java/BingSpellCheck` 23 | 1. `mvn compile exec:java cleanupDaemonThreads = false` 24 | 25 | ## More information 26 | 27 | - [Build and deploy Java apps on Azure](http://azure.com/java) 28 | - [The Java SDK reference](https://docs.microsoft.com/en-us/java/api/overview/azure/cognitiveservices/client?view=azure-java-stable) 29 | - [Bing Spell Check documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-spell-check/index) 30 | 31 | --- 32 | 33 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 34 | -------------------------------------------------------------------------------- /java/ComputerVision/4-0/pom.xml: -------------------------------------------------------------------------------- 1 | 4 | 4.0.0 5 | com.example 6 | my-application-name 7 | 1.0.0 8 | 9 | 10 | 11 | com.azure 12 | azure-ai-vision-imageanalysis 13 | 1.0.0-beta.2 14 | 15 | 16 | 20 | 21 | org.slf4j 22 | slf4j-nop 23 | 1.7.36 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /java/ComputerVision/4-0/sample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/ComputerVision/4-0/sample.png -------------------------------------------------------------------------------- /java/ComputerVision/README.md: -------------------------------------------------------------------------------- 1 | --- 2 | services: cognitive-services, computer-vision 3 | platforms: java 4 | author: wiazur 5 | --- 6 | 7 | # Computer Vision SDK Quickstart 8 | 9 | This quickstart uses image classification and object detection on an image with the Computer Vision Cognitive Service. It will retrieve and print information (including text) from the image. Maven is used. 10 | 11 | ## Getting Started 12 | 13 | ### Prerequisites 14 | - If you don't have a Microsoft Azure subscription, you can visit [the Microsoft Cognitive Services Web site](https://azure.microsoft.com/free/cognitive-services/), create a new Azure account, and try Cognitive Services for free. 15 | - Get an [Azure Computer Vision](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/) account to get your key and endpoint. 16 | - Add COMPUTER_VISION_KEY and COMPUTER_VISION_ENDPOINT to your environment variables with your key and endpoint as values. 17 | - After cloning this repo (see below), create a 'resources' folder in your 'src/main/' folder. 
18 | - Add images **landmark.jpg** and **printed_text.jpg** to it, downloaded locally from here: 19 | https://github.com/Azure-Samples/cognitive-services-sample-data-files/tree/master/ComputerVision/Images 20 | 21 | ### Clone and run 22 | 23 | Execute from the command line: 24 | 25 | 1. `git clone https://github.com/Azure-Samples/cognitive-services-quickstart-code.git` 26 | 1. `cd cognitive-services-quickstart-code/java/ComputerVision` 27 | 1. `mvn compile exec:java -Dexec.cleanupDaemonThreads=false` 28 | 29 | ## More information 30 | 31 | - [Build and deploy Java apps on Azure](http://azure.com/java) 32 | - The [Computer Vision documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/computer-vision/index) 33 | 34 | --- 35 | 36 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 37 | -------------------------------------------------------------------------------- /java/ContentModerator/src/main/resources/ImageFiles.txt: -------------------------------------------------------------------------------- 1 | https://moderatorsampleimages.blob.core.windows.net/samples/sample.jpg 2 | https://moderatorsampleimages.blob.core.windows.net/samples/sample5.png 3 | -------------------------------------------------------------------------------- /java/ContentModerator/src/main/resources/TextModeration.txt: -------------------------------------------------------------------------------- 1 | "Is this a grabage email abcdef@abcd.com, phone: 6657789887, IP: 255.255.255.255, 1 Microsoft Way, Redmond, WA 98052. Crap is the profanity here. Is this information PII? phone 3144444444" -------------------------------------------------------------------------------- /java/ContentModerator/target/classes/ImageFiles.txt: -------------------------------------------------------------------------------- 1 | https://moderatorsampleimages.blob.core.windows.net/samples/sample.jpg 2 | https://moderatorsampleimages.blob.core.windows.net/samples/sample5.png 3 | -------------------------------------------------------------------------------- /java/ContentModerator/target/classes/TextModeration.txt: -------------------------------------------------------------------------------- 1 | "Is this a grabage email abcdef@abcd.com, phone: 6657789887, IP: 255.255.255.255, 1 Microsoft Way, Redmond, WA 98052. Crap is the profanity here. Is this information PII? phone 3144444444" -------------------------------------------------------------------------------- /java/Face/cognsvcsdk/dependencies.txt: -------------------------------------------------------------------------------- 1 | Include these jar libraries in a lib folder, in the root of your project. 2 | They are dependencies for the FaceAPI SDK and AutoRest libraries. 3 | Avoid any alpha or beta versions. 4 | If specific versions are noted below, those are older versions that work better than the most recent, at the time of this commit. 
5 | 6 | * https://mvnrepository.com/artifact/com.microsoft.azure/azure-client-runtime 7 | * https://mvnrepository.com/artifact/com.microsoft.rest/client-runtime 8 | * https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-joda 9 | * https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations 10 | * https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core 11 | * https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind 12 | * https://mvnrepository.com/artifact/joda-time/joda-time 13 | * https://mvnrepository.com/artifact/com.google.guava/guava 14 | * https://mvnrepository.com/artifact/com.microsoft.azure/azure-annotations 15 | * https://mvnrepository.com/artifact/com.squareup.okhttp3/okhttp/3.14.2 16 | * https://mvnrepository.com/artifact/com.squareup.okio/okio/1.17.4 17 | * https://mvnrepository.com/artifact/com.squareup.okhttp3/logging-interceptor 18 | * https://mvnrepository.com/artifact/com.squareup.okhttp3/okhttp-urlconnection/3.14.2 19 | * https://mvnrepository.com/artifact/com.squareup.retrofit2/retrofit 20 | * https://mvnrepository.com/artifact/com.squareup.retrofit2/converter-jackson 21 | * https://mvnrepository.com/artifact/com.squareup.retrofit2/adapter-rxjava 22 | * https://mvnrepository.com/artifact/io.reactivex/rxjava 23 | * https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 24 | * https://mvnrepository.com/artifact/org.slf4j/slf4j-api 25 | * https://mvnrepository.com/artifact/org.slf4j/slf4j-jdk14/1.7.28 26 | -------------------------------------------------------------------------------- /java/Face/cognsvcsdk/faceapi-dependencies.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/Face/cognsvcsdk/faceapi-dependencies.jar -------------------------------------------------------------------------------- /java/Face/cognsvcsdk/ms-azure-cs-faceapi.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/Face/cognsvcsdk/ms-azure-cs-faceapi.jar -------------------------------------------------------------------------------- /java/Face/pom.xml: -------------------------------------------------------------------------------- 1 | 4 | 4.0.0 5 | com.example 6 | my-application-name 7 | 1.0.0 8 | 9 | 10 | 11 | com.azure 12 | azure-ai-vision-face 13 | 1.0.0-beta.2 14 | 15 | 16 | -------------------------------------------------------------------------------- /java/FormRecognizer/how-to-guide/business-card-model-output.md: -------------------------------------------------------------------------------- 1 | --------Analyzing business card 0 ----------- 2 | 3 | Contact name: Chris Smith 4 | 5 |         First Name: Chris, confidence: 0.99 6 | 7 |         Last Name: Smith, confidence: 0.99 8 | 9 | Job Title: Senior Researcher, confidence: 0.99 10 | 11 | Department: Cloud & AI Department, confidence: 0.97 12 | 13 | Email: chris.smith@contoso.com, confidence: 0.99 14 | 15 | Web site: https://www.contoso.com/, confidence: 0.99 16 | 17 | Mobile phone number: +19871234567, confidence: 0.99 18 | 19 | Fax phone number: +19873126745, confidence: 0.99 20 | 21 | Company name: CONTOSO, confidence: 0.40 22 | -------------------------------------------------------------------------------- 
/java/FormRecognizer/how-to-guide/id-document-output.md: -------------------------------------------------------------------------------- 1 | ----------- Analyzed license info for page 0 ----------- 2 | 3 | Date of Birth: 1988-03-23, confidence: 0.99 4 | 5 | Document date of expiration: 2026-03-23, confidence: 0.99 6 | 7 | Document number: 034568, confidence: 0.99 8 | 9 | First Name: CHRIS, confidence: 0.99 10 | 11 | Last name: SMITH, confidence: 0.99 12 | 13 | Region: West Virginia, confidence: 0.99 14 | -------------------------------------------------------------------------------- /java/FormRecognizer/how-to-guide/invoice-model-output.md: -------------------------------------------------------------------------------- 1 | ----------- Analyzing invoice  0 ----------- 2 | 3 | Vendor Name: CONTOSO LTD., confidence: 0.93 4 | 5 | Customer Name: MICROSOFT CORPORATION, confidence: 0.91 6 | 7 | Customer Address Recipient: Microsoft Corp, confidence: 0.93 8 | 9 | Invoice ID: INV-100, confidence: 0.97 10 | 11 | Invoice Date: 2019-11-15, confidence: 0.97 12 | 13 | Invoice Items: 14 | 15 | Description: Consulting Services, confidence: 0.95s 16 | 17 | Quantity: 2.000000, confidence: 0.97 18 | 19 | Description: Document Fee, confidence: 0.95s 20 | 21 | Quantity: 3.000000, confidence: 0.97 22 | 23 | Description: Printing Fee, confidence: 0.95s 24 | 25 | Quantity: 10.000000, confidence: 0.97 26 | -------------------------------------------------------------------------------- /java/FormRecognizer/how-to-guide/receipt-model-output.md: -------------------------------------------------------------------------------- 1 | ----------- Analyzing receipt info 0 ----------- 2 | 3 | Merchant Name: Contoso, confidence: 0.62 4 | 5 | Merchant Phone number: +19876543210, confidence: 0.99 6 | 7 | Transaction Date: 2019-06-10, confidence: 0.99 8 | 9 | Receipt Items: 10 | 11 | Total Price: 1998.000000, confidence: 1.00 12 | 13 | Quantity: 2.000000, confidence: 1.00 14 | 15 | Total Price: 299.970001, confidence: 1.00 16 | 17 | Quantity: 3.000000, confidence: 1.00 18 | -------------------------------------------------------------------------------- /java/FormRecognizer/how-to-guide/w2-tax-model-output.md: -------------------------------------------------------------------------------- 1 | ----------- Analyzing Document  0 ----------- 2 | 3 | Form variant: W-2, confidence: 0.99 4 | 5 | Employee Data: 6 | 7 | Employee Name: ANGEL BROWN, confidence: 1.00 8 | 9 | Employer Data: 10 | 11 | Employer Name: CONTOSO LTD, confidence: 1.00 12 | 13 | Employee ID Number: 98-7654321, confidence: 1.00 14 | 15 | Tax year: 2018, confidence: 1.00 16 | 17 | Social Security Tax withheld: 2303.95, confidence: 1.00 18 | -------------------------------------------------------------------------------- /java/FormRecognizer/v3-java-sdk-prebuilt-invoice-output.md: -------------------------------------------------------------------------------- 1 | # Quickstart output: Python SDK prebuilt-invoice model (beta) 2 | 3 | [Reference documentation](/java/api/overview/azure/ai-formrecognizer-readme?view=azure-java-preview&preserve-view=true) | [Library source code](https://github.com/Azure/azure-sdk-for-java/tree/azure-ai-formrecognizer_4.0.0-beta.4/sdk/formrecognizer/azure-ai-formrecognizer/) | [Package (Maven)](https://search.maven.org/artifact/com.azure/azure-ai-formrecognizer/4.0.0-beta.4/jar) | [Samples](https://github.com/Azure/azure-sdk-for-java/blob/main/sdk/formrecognizer/azure-ai-formrecognizer/src/samples/README.md) 4 | 5 | You can get started 
using the Azure Form Recognizer layout model with the [Java programming language quickstart](https://docs.microsoft.com/azure/applied-ai-services/form-recognizer/quickstarts/try-v3-java-sdk#prebuilt-model). The layout model analyzes and extracts tables, lines, words, and selection marks like radio buttons and check boxes from forms and documents, without the need to train a model. Here is the expected outcome from the prebuilt-invoice model quickstart code: 6 | 7 | ## Prebuilt-invoice output 8 | 9 | ----------- Analyzing invoice  0 ----------- 10 | 11 | Analyzed document has doc type invoice with confidence : 1.00 12 | 13 | .Vendor Name: CONTOSO LTD., confidence: 0.92 14 | 15 | Vendor address: 123 456th St New York, NY, 10001, confidence: 0.91 16 | 17 | Customer Name: MICROSOFT CORPORATION, confidence: 0.84 18 | 19 | Customer Address Recipient: Microsoft Corp, confidence: 0.92 20 | 21 | Invoice ID: INV-100, confidence: 0.97 22 | 23 | Invoice Date: 2019-11-15, confidence: 0.97 24 | 25 | Invoice Items: 26 | 27 | Description: Test for 23 fields, confidence: 0.93s 28 | 29 | Quantity: 1.000000, confidence: 0.97 30 | 31 | --- 32 | -------------------------------------------------------------------------------- /java/LUIS/java-model-with-rest/Model.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-model-with-rest/Model.class -------------------------------------------------------------------------------- /java/LUIS/java-model-with-rest/lib/commons-logging-1.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-model-with-rest/lib/commons-logging-1.2.jar -------------------------------------------------------------------------------- /java/LUIS/java-model-with-rest/lib/httpclient-4.5.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-model-with-rest/lib/httpclient-4.5.3.jar -------------------------------------------------------------------------------- /java/LUIS/java-model-with-rest/lib/httpcore-4.4.6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-model-with-rest/lib/httpcore-4.4.6.jar -------------------------------------------------------------------------------- /java/LUIS/java-predict-with-rest/Predict.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-predict-with-rest/Predict.class -------------------------------------------------------------------------------- /java/LUIS/java-predict-with-rest/lib/commons-logging-1.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-predict-with-rest/lib/commons-logging-1.2.jar 
-------------------------------------------------------------------------------- /java/LUIS/java-predict-with-rest/lib/httpclient-4.5.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-predict-with-rest/lib/httpclient-4.5.3.jar -------------------------------------------------------------------------------- /java/LUIS/java-predict-with-rest/lib/httpcore-4.4.6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/java/LUIS/java-predict-with-rest/lib/httpcore-4.4.6.jar -------------------------------------------------------------------------------- /java/azure_management_service/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 10 | 4.0.0 11 | 12 | com.foo 13 | bar 14 | 0.0.1-SNAPSHOT 15 | 16 | 17 | 18 | 19 | 20 | com.azure 21 | azure-identity 22 | 1.3.2 23 | test 24 | 25 | 26 | 27 | 28 | com.azure.resourcemanager 29 | azure-resourcemanager-cognitiveservices 30 | 1.0.0-beta.2 31 | 32 | 33 | 34 | 35 | 36 | com.fasterxml.jackson.core 37 | jackson-core 38 | 2.12.4 39 | 40 | 41 | 46 | 47 | org.slf4j 48 | slf4j-jdk14 49 | 1.7.25 50 | 51 | 52 | 53 | 54 | 55 | lib 56 | 57 | 58 | org.apache.maven.plugins 59 | maven-dependency-plugin 60 | 61 | 62 | ${project.build.directory} 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /java/qnamaker/rest/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 10 | 4.0.0 11 | 12 | com.foo 13 | bar 14 | 0.0.1-SNAPSHOT 15 | 16 | 17 | 18 | 19 | com.google.code.gson 20 | gson 21 | 2.8.5 22 | 23 | 24 | 25 | 26 | 27 | lib 28 | 29 | 30 | org.apache.maven.plugins 31 | maven-dependency-plugin 32 | 33 | 34 | ${project.build.directory} 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /javascript/AnomalyDetector/request-data.csv: -------------------------------------------------------------------------------- 1 | 2018-03-01T00:00:00Z,32858923 2 | 2018-03-02T00:00:00Z,29615278 3 | 2018-03-03T00:00:00Z,22839355 4 | 2018-03-04T00:00:00Z,25948736 5 | 2018-03-05T00:00:00Z,34139159 6 | 2018-03-06T00:00:00Z,33843985 7 | 2018-03-07T00:00:00Z,33637661 8 | 2018-03-08T00:00:00Z,32627350 9 | 2018-03-09T00:00:00Z,29881076 10 | 2018-03-10T00:00:00Z,22681575 11 | 2018-03-11T00:00:00Z,24629393 12 | 2018-03-12T00:00:00Z,34010679 13 | 2018-03-13T00:00:00Z,33893888 14 | 2018-03-14T00:00:00Z,33760076 15 | 2018-03-15T00:00:00Z,33093515 16 | 2018-03-16T00:00:00Z,29945555 17 | 2018-03-17T00:00:00Z,22676212 18 | 2018-03-18T00:00:00Z,25262514 19 | 2018-03-19T00:00:00Z,33631649 20 | 2018-03-20T00:00:00Z,34468310 21 | 2018-03-21T00:00:00Z,34212281 22 | 2018-03-22T00:00:00Z,38144434 23 | 2018-03-23T00:00:00Z,34662949 24 | 2018-03-24T00:00:00Z,24623684 25 | 2018-03-25T00:00:00Z,26530491 26 | 2018-03-26T00:00:00Z,35445003 27 | 2018-03-27T00:00:00Z,34250789 28 | 2018-03-28T00:00:00Z,33423012 29 | 2018-03-29T00:00:00Z,30744783 30 | 2018-03-30T00:00:00Z,25825128 31 | 2018-03-31T00:00:00Z,21244209 32 | 2018-04-01T00:00:00Z,22576956 33 | 2018-04-02T00:00:00Z,31957221 34 | 2018-04-03T00:00:00Z,33841228 35 | 2018-04-04T00:00:00Z,33554483 36 | 
2018-04-05T00:00:00Z,32383350 37 | 2018-04-06T00:00:00Z,29494850 38 | 2018-04-07T00:00:00Z,22815534 39 | 2018-04-08T00:00:00Z,25557267 40 | 2018-04-09T00:00:00Z,34858252 41 | 2018-04-10T00:00:00Z,34750597 42 | 2018-04-11T00:00:00Z,34717956 43 | 2018-04-12T00:00:00Z,34132534 44 | 2018-04-13T00:00:00Z,30762236 45 | 2018-04-14T00:00:00Z,22504059 46 | 2018-04-15T00:00:00Z,26149060 47 | 2018-04-16T00:00:00Z,35250105 -------------------------------------------------------------------------------- /javascript/AutoSuggest/autoSuggest.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var Autosuggest = require("@azure/cognitiveservices-autosuggest"); 4 | var msRest = require("@azure/ms-rest-js"); 5 | 6 | /* To run this sample, install the following modules. 7 | * npm install @azure/cognitiveservices-autosuggest 8 | * npm install @azure/ms-rest-js 9 | */ 10 | 11 | let subscription_key = 'PASTE_YOUR_AUTO_SUGGEST_SUBSCRIPTION_KEY_HERE'; 12 | 13 | // Create a client 14 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 15 | const client = new Autosuggest.AutoSuggestClient(creds); 16 | 17 | async function quickstart() { 18 | // Returns a Suggestions interface 19 | let result = await client.autoSuggest("xb", null); 20 | 21 | // Returns a SuggestionsSuggestionGroup[] 22 | let groups = result.suggestionGroups; 23 | if (groups.length > 0) { 24 | // Returns a SearchAction[] 25 | let suggestions = groups[0].searchSuggestions; 26 | if (suggestions.length > 0) { 27 | // View the entire suggestions list 28 | console.log(suggestions) 29 | // View certain properties 30 | for (var item in suggestions) { 31 | console.log(suggestions[item].query); 32 | console.log(suggestions[item].displayText); 33 | } 34 | } 35 | else { 36 | console.log("No suggestions found in this group."); 37 | } 38 | } 39 | else { 40 | console.log("No suggestions found."); 41 | } 42 | } 43 | 44 | try { 45 | quickstart(); 46 | } 47 | catch (error) { 48 | console.log(error); 49 | } 50 | -------------------------------------------------------------------------------- /javascript/BingLocal/quickstart.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 
4 | * npm install @azure/cognitiveservices-localsearch 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var LocalSearch = require ("@azure/cognitiveservices-localsearch"); 8 | var msRest = require ("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_BING_SEARCH_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_BING_SEARCH_ENDPOINT'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new LocalSearch.LocalSearchClient(creds, { baseUri: endpoint }); 15 | 16 | async function quickstart() { 17 | var result = await client.local.search("restaurant", { location: 'lat:47.608013;long:-122.335167;re:100m'}); 18 | 19 | var places = result.places.value; 20 | if (places.length > 0) { 21 | console.log("Results:\n"); 22 | places.forEach((item) => { 23 | console.log("Name: " + item.name); 24 | console.log("URL: " + item.url); 25 | console.log(); 26 | }); 27 | } 28 | else { 29 | console.log("No places found for this query."); 30 | } 31 | } 32 | 33 | try { 34 | quickstart(); 35 | } 36 | catch (error) { 37 | console.log(error); 38 | } 39 | -------------------------------------------------------------------------------- /javascript/BingSpellCheck/bing_spell_check_quickstart.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* This Bing Spell Check quickstart takes in some misspelled words and suggests corrections. 4 | * 5 | * Prerequisites: 6 | * - Add your Bing Spell Check subscription key and endpoint to your environment variables, using 7 | * BING_SPELL_CHECK_SUBSCRIPTION_KEY and BING_SPELL_CHECK_ENDPOINT as variable names. 8 | * - Install the following modules: 9 | * npm install @azure/cognitiveservices-spellcheck 10 | * npm install @azure/ms-rest-js 11 | * 12 | * Node SDK: https://docs.microsoft.com/en-us/javascript/api/@azure/cognitiveservices-spellcheck/?view=azure-node-latest 13 | */ 14 | var SpellCheck = require("@azure/cognitiveservices-spellcheck"); 15 | var msRest = require("@azure/ms-rest-js"); 16 | 17 | const key = 'PASTE_YOUR_SPELL_CHECK_SUBSCRIPTION_KEY_HERE'; 18 | const endpoint = 'PASTE_YOUR_SPELL_CHECK_ENDPOINT_HERE'; 19 | 20 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': key } }); 21 | const client = new SpellCheck.SpellCheckClient(creds, { endpoint: endpoint }); 22 | 23 | async function quickstart() { 24 | let query = 'bill gtaes was eher'; 25 | let misspelledWords = []; 26 | let suggestedWords = []; 27 | await client.spellChecker(query) 28 | .then((response) => { 29 | console.log(); 30 | for (var i = 0; i < response._response.parsedBody.flaggedTokens.length; i++) { 31 | var spellingFlaggedToken = response._response.parsedBody.flaggedTokens[i]; 32 | misspelledWords.push(spellingFlaggedToken.token); 33 | var correction = spellingFlaggedToken.suggestions[0].suggestion; // gets each word 34 | suggestedWords.push(correction); 35 | } 36 | console.log('Original query: ' + query); 37 | console.log(); 38 | console.log('Misspelled words: '); 39 | console.log(misspelledWords); 40 | console.log(); 41 | console.log('Suggested correction(s): '); 42 | console.log(suggestedWords); 43 | }).catch((err) => { 44 | throw err; 45 | }) 46 | } 47 | 48 | try { 49 | quickstart(); 50 | } 51 | catch (error) { 52 | console.log(error); 53 | } 54 | -------------------------------------------------------------------------------- /javascript/ComputerVision/4-0/quickstart.js: 
-------------------------------------------------------------------------------- 1 | // Copyright (c) Microsoft Corporation. 2 | // Licensed under the MIT license. 3 | 4 | // 5 | const { ImageAnalysisClient } = require('@azure-rest/ai-vision-image-analysis'); 6 | const createClient = require('@azure-rest/ai-vision-image-analysis').default; 7 | const { AzureKeyCredential } = require('@azure/core-auth'); 8 | 9 | // Load the .env file if it exists 10 | require("dotenv").config(); 11 | 12 | const endpoint = process.env['VISION_ENDPOINT']; 13 | const key = process.env['VISION_KEY']; 14 | 15 | const credential = new AzureKeyCredential(key); 16 | const client = createClient(endpoint, credential); 17 | 18 | const features = [ 19 | 'Caption', 20 | 'Read' 21 | ]; 22 | 23 | const imageUrl = 'https://learn.microsoft.com/azure/ai-services/computer-vision/media/quickstarts/presentation.png'; 24 | 25 | async function analyzeImageFromUrl() { 26 | const result = await client.path('/imageanalysis:analyze').post({ 27 | body: { 28 | url: imageUrl 29 | }, 30 | queryParameters: { 31 | features: features 32 | }, 33 | contentType: 'application/json' 34 | }); 35 | 36 | const iaResult = result.body; 37 | 38 | if (iaResult.captionResult) { 39 | console.log(`Caption: ${iaResult.captionResult.text} (confidence: ${iaResult.captionResult.confidence})`); 40 | } 41 | if (iaResult.readResult) { 42 | iaResult.readResult.blocks.forEach(block => console.log(`Text Block: ${JSON.stringify(block)}`)); 43 | } 44 | } 45 | 46 | analyzeImageFromUrl(); 47 | // -------------------------------------------------------------------------------- /javascript/ComputerVision/celebrities.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/javascript/ComputerVision/celebrities.jpg -------------------------------------------------------------------------------- /javascript/ComputerVision/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "computervision", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "ComputerVisionQuickstart.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "MIT", 11 | "dependencies": { 12 | "@azure/cognitiveservices-computervision": "^7.0.1", 13 | "@azure/ms-rest-js": "^2.0.8", 14 | "async": "^3.2.0" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /javascript/ContentModerator/text_file.txt: -------------------------------------------------------------------------------- 1 | Is this a grabage email abcdef@abcd.com, phone: 6657789887, IP: 255.255.255.255, 1 Microsoft Way, Redmond, WA 98052. 2 | Crap is the profanity here. Is this information PII? phone 3144444444 -------------------------------------------------------------------------------- /javascript/CustomSearch/bing_custom_search_quickstart.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /* 4 | * This quickstart performs a Bing custom search query, using the search term "xbox". 
5 | * 6 | * Prequisites: 7 | * - Get your Bing Custom Search subscription key from the Azure portal, 8 | * then add it to your environment variables as BING_CUSTOM_SEARCH_SUBSCRIPTION_KEY 9 | * - Install these npm packages from the command line: 10 | * npm install azure-cognitiveservices-customsearch 11 | * npm install ms-rest-azure 12 | * - Create a Bing Custom Search instance: https://docs.microsoft.com/en-us/azure/cognitive-services/bing-custom-search/quick-start#create-a-custom-search-instance 13 | * In creating the instance, add some search URLs, such as: https://twitter.com/xbox, 14 | * https://www.facebook.com/xbox, etc. (or your own preferred search URLs). 15 | * 16 | * How to run, from the command line: 17 | * node bing_custom_search_quickstart.js 18 | * 19 | * Azure Bing Custom Search Node.js SDK: 20 | * https://github.com/Azure/azure-sdk-for-node/blob/master/lib/services/cognitiveServicesCustomSearch/README.md 21 | */ 22 | 23 | const search = require('azure-cognitiveservices-customsearch'); 24 | const auth = require('ms-rest-azure'); 25 | 26 | let subscriptionKey = 'PASTE_YOUR_CUSTOM_SEARCH_SUBSCRIPTION_KEY_HERE'; 27 | let instance_ID = 'PASTE_YOUR_CUSTOM_SEARCH_INSTANCE_ID_HERE'; 28 | 29 | let query = 'xbox'; 30 | 31 | let credentials = new auth.CognitiveServicesCredentials(subscriptionKey); 32 | let client = new search.CustomSearchClient(credentials); 33 | 34 | /* For more information about CustomSearchClient and CustomInstance, see: 35 | * https://docs.microsoft.com/en-us/javascript/api/azure-cognitiveservices-customsearch/customsearchclient?view=azure-node-latest 36 | * https://docs.microsoft.com/en-us/javascript/api/azure-cognitiveservices-customsearch/custominstance?view=azure-node-latest 37 | */ 38 | client.customInstance.search(instance_ID, query, function (err, result, request, response) { 39 | if (err) { 40 | console.log(err); 41 | } 42 | else { 43 | console.log(result.queryContext.originalQuery); 44 | console.log(result.webPages.value); 45 | } 46 | }); 47 | -------------------------------------------------------------------------------- /javascript/Face/rest/detect.js: -------------------------------------------------------------------------------- 1 | // 2 | 'use strict'; 3 | 4 | const axios = require('axios').default; 5 | 6 | let subscriptionKey = 'PASTE_YOUR_FACE_SUBSCRIPTION_KEY_HERE' 7 | let endpoint = 'PASTE_YOUR_FACE_ENDPOINT_HERE' + '/face/v1.0/detect' 8 | 9 | // Optionally, replace with your own image URL (for example a .jpg or .png URL). 10 | let imageUrl = 'https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/faces.jpg' 11 | // 12 | 13 | //
14 | // Send a POST request 15 | axios({ 16 |     method: 'post', 17 |     url: endpoint, 18 |     params : { 19 |         detectionModel: 'detection_03', 20 |         returnFaceId: true 21 |     }, 22 |     data: { 23 |         url: imageUrl, 24 |     }, 25 |     headers: { 'Ocp-Apim-Subscription-Key': subscriptionKey } 26 | }).then(function (response) { 27 |     console.log('Status code: ' + response.status) 28 |     console.log('Status text: ' + response.statusText) 29 |     console.log() 30 |     console.log(response.data) 31 | }).catch(function (error) { 32 |     console.log(error) 33 | }); 34 | //
35 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/how-to-guide/business-card-model-output.md: -------------------------------------------------------------------------------- 1 | Name: Chris Smith 2 | 3 | Company: CONTOSO 4 | 5 | Address: 4001 1st Ave NE Redmond, WA 98052 6 | 7 | Job title: Senior Researcher 8 | 9 | Department: Cloud & AI Department 10 | 11 | Email: chris.smith@contoso.com 12 | 13 | Work phone: +1 (987) 213-5674 14 | 15 | Website: https://www.contoso.com/ 16 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/how-to-guide/general-document-model-output.md: -------------------------------------------------------------------------------- 1 | Key-Value Pairs: 2 | 3 | - Key  : "QUARTERLY REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934" 4 | 5 | Value: ":selected:" (0.76) 6 | 7 | - Key  : "For the Quarterly Period Ended" 8 | 9 | Value: "2020" (0.155) 10 | 11 | - Key  : "March 31," 12 | 13 | Value: "" (0.118) 14 | 15 | - Key  : "TRANSITION REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934" 16 | 17 | Value: ":unselected:" (0.761) 18 | 19 | - Key  : "For the Transition Period From" 20 | 21 | Value: "" (0.227) 22 | 23 | - Key  : "to" 24 | 25 | Value: "" (0.267) 26 | 27 | - Key  : "Commission File Number" 28 | 29 | Value: "001-37845" (0.863) 30 | 31 | - Key  : "(STATE OF INCORPORATION)" 32 | 33 | Value: "WASHINGTON 34 | 35 | (425) 882-8080" (0.38) 36 | 37 | - Key  : "(I.R.S. ID)" 38 | 39 | Value: "91-1144442" (0.888) 40 | 41 | - Key  : "Securities registered pursuant to Section 12(g) of the Act:" 42 | 43 | Value: "NONE" (0.816) 44 | 45 | - Key  : "Yes" 46 | 47 | Value: ":selected:" (0.911) 48 | 49 | - Key  : "No" 50 | 51 | Value: ":unselected:" (0.911) 52 | 53 | - Key  : "Yes" 54 | 55 | Value: ":selected:" (0.911) 56 | 57 | - Key  : "No" 58 | 59 | Value: ":unselected:" (0.911) 60 | 61 | - Key  : "Large accelerated filer" 62 | 63 | Value: ":selected:" (0.874) 64 | 65 | - Key  : "Accelerated filer" 66 | 67 | Value: ":unselected:" (0.889) 68 | 69 | - Key  : "Non-accelerated filer" 70 | 71 | Value: ":unselected:" (0.889) 72 | 73 | - Key  : "Smaller reporting company" 74 | 75 | Value: ":unselected:" (0.874) 76 | 77 | - Key  : "Emerging growth company" 78 | 79 | Value: ":unselected:" (0.874) 80 | 81 | - Key  : "If an emerging growth company, indicate by check mark if the registrant has elected not to use the extended transition period for complying with any new or revised financial accounting standards provided pursuant to Section 13(a) of the Exchange Act." 
82 | 83 | Value: ":unselected:" (0.674) 84 | 85 | - Key  : "Yes" 86 | 87 | Value: ":unselected:" (0.911) 88 | 89 | - Key  : "No" 90 | 91 | Value: ":selected:" (0.911) 92 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/how-to-guide/id-document-output.md: -------------------------------------------------------------------------------- 1 | Extracted a Driver License: 2 | 3 | Name: CHRIS SMITH 4 | 5 | License No.: 034568 6 | 7 | Date of Birth: 03/23/1988 8 | 9 | Expiration: 03/23/2026 10 | 11 | Height: 5'11" 12 | 13 | Weight: 185LB 14 | 15 | Eye color: BLU 16 | 17 | Restrictions: NONE 18 | 19 | Endorsements: NONE 20 | 21 | Class: A 22 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/how-to-guide/invoice-model-output.md: -------------------------------------------------------------------------------- 1 | Vendor Name: CONTOSO LTD. 2 | 3 | Customer Name: MICROSOFT CORPORATION 4 | 5 | Invoice Date: 11 / 15 / 2019 6 | 7 | Due Date: 12 / 15 / 2019 8 | 9 | Items: 10 | 11 | -A123 12 | 13 | Description: Consulting Services 14 | 15 | Quantity: 2 16 | 17 | Date: 3 / 4 / 2021 18 | 19 | Unit: hours 20 | 21 | Unit Price: $30 .00 22 | 23 | Tax: $6 .00 24 | 25 | Amount: $60 .00 26 | 27 | - 28 | B456 29 | 30 | Description: Document Fee 31 | 32 | Quantity: 3 33 | 34 | Date: 3 / 5 / 2021 35 | 36 | Unit: undefined 37 | 38 | Unit Price: $10 .00 39 | 40 | Tax: $3 .00 41 | 42 | Amount: $30 .00 43 | 44 | - 45 | C789 46 | 47 | Description: Printing Fee 48 | 49 | Quantity: 10 50 | 51 | Date: 3 / 6 / 2021 52 | 53 | Unit: pages 54 | 55 | Unit Price: $1 .00 56 | 57 | Tax: $1 .00 58 | 59 | Amount: $10 .00 60 | 61 | Subtotal: $100 .00 62 | 63 | Previous Unpaid Balance: $500 .00 64 | 65 | Tax: $10 .00 66 | 67 | Amount Due: $610 .00 68 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/how-to-guide/layout-model-output.md: -------------------------------------------------------------------------------- 1 | Pages: 2 | 3 | - Page 1 (unit: inch) 4 | 5 | 8.5x11, angle: 0 6 | 7 | 18 lines, 34 words 8 | 9 | Lines: 10 | 11 | - "Contoso" 12 | 13 | - "Contoso" 14 | 15 | - "Address:" 16 | 17 | - "Address:" 18 | 19 | - "1 Redmond way Suite" 20 | 21 | - "1" 22 | 23 | - "Redmond" 24 | 25 | - "way" 26 | 27 | - "Suite" 28 | 29 | - "6000 Redmond, WA" 30 | 31 | - "6000" 32 | 33 | - "Redmond," 34 | 35 | - "WA" 36 | 37 | - "99243" 38 | 39 | - "99243" 40 | 41 | - "Invoice For: Microsoft" 42 | 43 | - "Invoice" 44 | 45 | - "For:" 46 | 47 | - "Microsoft" 48 | 49 | - "1020 Enterprise Way" 50 | 51 | - "1020" 52 | 53 | - "Enterprise" 54 | 55 | - "Way" 56 | 57 | - "Sunnayvale, CA 87659" 58 | 59 | - "Sunnayvale," 60 | 61 | - "CA" 62 | 63 | - "87659" 64 | 65 | - "Invoice Number" 66 | 67 | - "Invoice" 68 | 69 | - "Number" 70 | 71 | - "Invoice Date" 72 | 73 | - "Invoice" 74 | 75 | - "Date" 76 | 77 | - "Invoice Due Date" 78 | 79 | - "Invoice" 80 | 81 | - "Due" 82 | 83 | - "Date" 84 | 85 | - "Charges" 86 | 87 | - "Charges" 88 | 89 | - "VAT ID" 90 | 91 | - "VAT" 92 | 93 | - "ID" 94 | 95 | - "34278587" 96 | 97 | - "34278587" 98 | 99 | - "6/18/2017" 100 | 101 | - "6/18/2017" 102 | 103 | - "6/24/2017" 104 | 105 | - "6/24/2017" 106 | 107 | - "$56,651.49" 108 | 109 | - "$56,651.49" 110 | 111 | - "PT" 112 | 113 | - "PT" 114 | 115 | Tables: 116 | 117 | - Extracted table: 5 columns, 3 rows (10 cells) 118 | -------------------------------------------------------------------------------- 
/javascript/FormRecognizer/how-to-guide/receipt-model-output.md: -------------------------------------------------------------------------------- 1 | === Receipt Information === 2 | 3 | Type: receipt.retailMeal 4 | 5 | Merchant: Contoso 6 | 7 | Items: 8 | 9 | `-` Description: Surface Pro 6 10 | 11 |   Total Price: $1,998.00 12 | 13 | `-` Description: Surface Pen 14 | 15 |   Total Price: $299.97 16 | 17 |   Total: $2,516.28 18 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/how-to-guide/w2-tax-model-output.md: -------------------------------------------------------------------------------- 1 | Employee: 2 | 3 | Name: ANGEL BROWN 4 | 5 | SSN/TIN: 123-45-6789 6 | 7 | Address: 8 | 9 | Street Address: 4567 MAIN STREET 10 | 11 | Postal Code: 12345 12 | 13 | Employer: 14 | 15 | Name: CONTOSO LTD 16 | 17 | ID (EIN): 98-7654321 18 | 19 | Address: 20 | 21 | Street Address: 123 MICROSOFT WAY 22 | 23 | Postal Code: 98765 24 | 25 | Control Number: 000086242 26 | 27 | Tax Year: 2018 28 | 29 | Additional Info: 30 | 31 | - DD: 6939.68 32 | 33 | - F: 5432.00 34 | 35 | - D: 876.30 36 | 37 | - C: 123.30 38 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/v3-javascript-sdk-layout-output.md: -------------------------------------------------------------------------------- 1 | # Quickstart output: JavaScript SDK layout model (beta) 2 | 3 | [Reference documentation](/javascript/api/@azure/ai-form-recognizer/?view=azure-node-preview&preserve-view=true) | [Library source code](https://github.com/Azure/azure-sdk-for-js/tree/@azure/ai-form-recognizer_4.0.0-beta.3/sdk/formrecognizer/ai-form-recognizer/) | [Package (npm)](https://www.npmjs.com/package/@azure/ai-form-recognizer/v/4.0.0-beta.3) | [Samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/formrecognizer/ai-form-recognizer/samples/v4-beta/javascript/README.md) 4 | 5 | You can get started using the Azure Form Recognizer layout model with the [JavaScript programming language quickstart](https://docs.microsoft.com/azure/applied-ai-services/form-recognizer/quickstarts/try-v3-javascript-sdk#layout-model). The layout model analyzes and extracts tables, lines, words, and selection marks like radio buttons and check boxes from forms and documents, without the need to train a model. 
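For orientation, the sketch below shows roughly what the layout quickstart does before producing that output. It is not the verbatim quickstart code: it assumes a recent 4.x release of `@azure/ai-form-recognizer` (method names in the 4.0.0-beta.3 package linked above may differ slightly), and the endpoint, key, and document URL are placeholders for you to supply.

```javascript
// Minimal layout-analysis sketch; endpoint, key, and document URL are placeholders.
const { AzureKeyCredential, DocumentAnalysisClient } = require("@azure/ai-form-recognizer");

const endpoint = "PASTE_YOUR_FORM_RECOGNIZER_ENDPOINT_HERE";
const key = "PASTE_YOUR_FORM_RECOGNIZER_KEY_HERE";
const documentUrl = "PASTE_A_DOCUMENT_URL_HERE";

async function analyzeLayout() {
  const client = new DocumentAnalysisClient(endpoint, new AzureKeyCredential(key));

  // Start the layout analysis and wait for the long-running operation to finish.
  const poller = await client.beginAnalyzeDocumentFromUrl("prebuilt-layout", documentUrl);
  const { pages, tables } = await poller.pollUntilDone();

  // Summarize what was extracted, in the same spirit as the quickstart output below.
  for (const page of pages ?? []) {
    console.log(`- Page ${page.pageNumber} (unit: ${page.unit})`);
    console.log(`  ${page.width}x${page.height}, angle: ${page.angle}`);
    console.log(`  ${page.lines?.length ?? 0} lines, ${page.words?.length ?? 0} words`);
  }
  for (const table of tables ?? []) {
    console.log(`- Extracted table: ${table.columnCount} columns, ${table.rowCount} rows (${table.cells.length} cells)`);
  }
}

analyzeLayout().catch((err) => console.log(err));
```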
Here is the expected outcome from the layout model quickstart code: 6 | 7 | ## Layout model output 8 | 9 | Pages: 10 | - Page 1 (unit: inch) 11 | 8.5x11, angle: 0 12 | 69 lines, 425 words 13 | Tables: 14 | - Extracted table: 3 columns, 5 rows (15 cells) 15 | - Extracted table: 2 columns, 2 rows (4 cells) 16 | 17 | --- 18 | -------------------------------------------------------------------------------- /javascript/FormRecognizer/v3-javascript-sdk-prebuilt-invoice-output.md: -------------------------------------------------------------------------------- 1 | # Quickstart output: Python SDK prebuilt-invoice model (beta) 2 | 3 | [Reference documentation](/javascript/api/@azure/ai-form-recognizer/?view=azure-node-preview&preserve-view=true) | [Library source code](https://github.com/Azure/azure-sdk-for-js/tree/@azure/ai-form-recognizer_4.0.0-beta.3/sdk/formrecognizer/ai-form-recognizer/) | [Package (npm)](https://www.npmjs.com/package/@azure/ai-form-recognizer/v/4.0.0-beta.3) | [Samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/formrecognizer/ai-form-recognizer/samples/v4-beta/javascript/README.md) 4 | 5 | You can get started using the Azure Form Recognizer layout model with the [JavaScript programming language quickstart](https://docs.microsoft.com/azure/applied-ai-services/form-recognizer/quickstarts/try-v3-javascript-sdk#prebuilt-model). The layout model analyzes and extracts tables, lines, words, and selection marks like radio buttons and check boxes from forms and documents, without the need to train a model. Here is the expected outcome from the prebuilt-invoice model quickstart code: 6 | 7 | ## Prebuilt-invoice output 8 | 9 | Vendor Name: CONTOSO LTD. 10 | 11 | Customer Name: MICROSOFT CORPORATION 12 | 13 | Invoice Date: 2019-11-15T00:00:00.000Z 14 | 15 | Due Date: 2019-12-15T00:00:00.000Z 16 | 17 | Items: 18 | 19 | - 20 | 21 |   Description: Test for 23 fields 22 | 23 |   Quantity: 1 24 | 25 |   Date: undefined 26 | 27 |   Unit: undefined 28 | 29 |   Unit Price: 1 30 | 31 |   Tax: undefined 32 | 33 |   Amount: 100 34 | 35 | Subtotal: 100 36 | 37 | Previous Unpaid Balance: 500 38 | 39 | Tax: 10 40 | 41 | Amount Due: 610 42 | 43 | --- 44 | -------------------------------------------------------------------------------- /javascript/LUIS/node-model-with-rest/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "model-with-rest", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "model.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "request": "^2.88.2", 13 | "request-promise": "^4.2.5" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /javascript/LUIS/node-predict-with-rest/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "predict-with-rest", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "predict.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "querystring": "^0.2.0", 13 | "request": "^2.88.2", 14 | "request-promise": "^4.2.5" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /javascript/LUIS/node-predict-with-rest/predict.js: -------------------------------------------------------------------------------- 1 | // 2 | // 
This quickstart shows how to predict the intent of an utterance by using the LUIS REST APIs. 3 | // 4 | 5 | var requestPromise = require('request-promise'); 6 | var queryString = require('querystring'); 7 | 8 | // Analyze a string utterance. 9 | getPrediction = async () => { 10 | 11 | ////////// 12 | // Values to modify. 13 | 14 | // YOUR-APP-ID: The App ID GUID found on the www.luis.ai Application Settings page. 15 | const LUIS_appId = "PASTE_YOUR_LUIS_APP_ID_HERE"; 16 | 17 | // YOUR-PREDICTION-KEY: Your LUIS authoring key, 32 character value. 18 | const LUIS_predictionKey = "PASTE_YOUR_LUIS_PREDICTION_SUBSCRIPTION_KEY_HERE"; 19 | 20 | // YOUR-PREDICTION-ENDPOINT: Replace this with your authoring key endpoint. 21 | // For example, "https://westus.api.cognitive.microsoft.com/" 22 | const LUIS_endpoint = "PASTE_YOUR_LUIS_PREDICTION_ENDPOINT_HERE"; 23 | 24 | // The utterance you want to use. 25 | const utterance = "I want two large pepperoni pizzas on thin crust please"; 26 | ////////// 27 | 28 | // Create query string 29 | const queryParams = { 30 | "show-all-intents": true, 31 | "verbose": true, 32 | "query": utterance, 33 | "subscription-key": LUIS_predictionKey 34 | } 35 | 36 | // Create the URI for the REST call. 37 | const URI = `${LUIS_endpoint}luis/prediction/v3.0/apps/${LUIS_appId}/slots/production/predict?${queryString.stringify(queryParams)}` 38 | 39 | // Send the REST call. 40 | const response = await requestPromise(URI); 41 | 42 | // Display the response from the REST call. 43 | console.log(response); 44 | } 45 | 46 | // Pass an utterance to the sample LUIS app 47 | getPrediction().then(()=>console.log("done")).catch((err)=>console.log(err)); 48 | -------------------------------------------------------------------------------- /javascript/LUIS/node-sdk-authoring-prediction/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cognitive-services-quickstart-code-luis-auhoring", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "node luis_authoring_quickstart.js", 8 | "test": "echo \"Error: no test specified\" && exit 1" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/diberry/cognitive-services-quickstart-code.git" 13 | }, 14 | "author": "", 15 | "license": "ISC", 16 | "bugs": { 17 | "url": "https://github.com/diberry/cognitive-services-quickstart-code/issues" 18 | }, 19 | "homepage": "https://github.com/diberry/cognitive-services-quickstart-code#readme", 20 | "dependencies": { 21 | "@azure/cognitiveservices-luis-authoring": "^4.0.0-preview.1", 22 | "@azure/cognitiveservices-luis-runtime": "^5.0.0", 23 | "@azure/ms-rest-js": "^2.0.5", 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /javascript/LUIS/node-sdk-authoring-prediction/readme.md: -------------------------------------------------------------------------------- 1 | # Create, train, publish, delete a Language understanding app 2 | 3 | ## Install 4 | 5 | ```javascript 6 | npm install 7 | ``` 8 | 9 | ## Run app creation and management 10 | 11 | * In Azure portal, create authoring resource for Language Understanding 12 | * In LUIS portal, assign keys to app 13 | * In code files: 14 | * For authoring, set key and endpoint 15 | * Run code file from command line or terminal window with following command. 
16 | 17 | ```javascript 18 | node luis_authoring_quickstart.js 19 | ``` 20 | 21 | ### Sample output 22 | 23 | ```console 24 | Created LUIS app with ID e137a439-b3e0-4e16-a7a8-a9746e0715f7 25 | Entity Destination created. 26 | Entity Class created. 27 | Entity Flight created. 28 | Intent FindFlights added. 29 | Created 3 entity labels. 30 | Created 3 entity labels. 31 | Created 3 entity labels. 32 | Example utterances added. 33 | Waiting for train operation to finish... 34 | Current model status: ["Queued"] 35 | Current model status: ["InProgress"] 36 | Current model status: ["InProgress"] 37 | Current model status: ["InProgress"] 38 | Current model status: ["Success"] 39 | Application published. Endpoint URL: https://westus.api.cognitive.microsoft.com/luis/v2.0/apps/e137a439-b3e0-4e16-a7a8-a9746e0715f7 40 | Application with ID e137a439-b3e0-4e16-a7a8-a9746e0715f7 deleted. Operation result: Operation Successful 41 | ``` 42 | 43 | ## Query runtime to get prediction results 44 | 45 | This quickstart uses the public IoT app. 46 | 47 | * In Azure portal, create prediction resource for Language Understanding 48 | * In LUIS portal, assign keys to app 49 | * In code files: 50 | * For prediction, set key, endpoint, app ID, and slot name 51 | * Run code file from command line or terminal window with following command. 52 | 53 | ```javascript 54 | node luis_prediction.js 55 | ``` 56 | 57 | ### Sample output 58 | 59 | The prediction result returns a JSON object: 60 | 61 | ```console 62 | {"query":"turn on all lights","prediction":{"topIntent":"HomeAutomation.TurnOn","intents":{"HomeAutomation.TurnOn":{"score":0.5375382},"None":{"score":0.08687421},"HomeAutomation.TurnOff":{"score":0.0207554}},"entities":{"HomeAutomation.Operation":["on"],"$instance":{"HomeAutomation.Operation":[{"type":"HomeAutomation.Operation","text":"on","startIndex":5,"length":2,"score":0.724984169,"modelTypeId":-1,"modelType":"Unknown","recognitionSources":["model"]}]}}}} 63 | ``` 64 | -------------------------------------------------------------------------------- /javascript/LUIS/sdk-3x/.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------------------------------------- 2 | # Copyright (c) Microsoft Corporation. All rights reserved. 3 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 4 | #------------------------------------------------------------------------------------------------------------- 5 | 6 | # To fully customize the contents of this image, use the following Dockerfile instead: 7 | # https://github.com/microsoft/vscode-dev-containers/tree/v0.128.0/containers/javascript-node-10/.devcontainer/Dockerfile 8 | FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-10 9 | 10 | # ** [Optional] Uncomment this section to install additional packages. 
** 11 | # 12 | # RUN apt-get update \ 13 | # && export DEBIAN_FRONTEND=noninteractive \ 14 | # && apt-get -y install --no-install-recommends 15 | 16 | -------------------------------------------------------------------------------- /javascript/LUIS/sdk-3x/.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/vscode-remote/devcontainer.json or this file's README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.128.0/containers/javascript-node-10 3 | { 4 | "name": "Node.js 10", 5 | "dockerFile": "Dockerfile", 6 | 7 | // Set *default* container specific settings.json values on container create. 8 | "settings": { 9 | "terminal.integrated.shell.linux": "/bin/bash" 10 | }, 11 | 12 | // Add the IDs of extensions you want installed when the container is created. 13 | "extensions": [ 14 | "dbaeumer.vscode-eslint" 15 | ] 16 | 17 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 18 | // "forwardPorts": [], 19 | 20 | // Use 'postCreateCommand' to run commands after the container is created. 21 | ,"postCreateCommand": "yarn install", 22 | 23 | // Uncomment to connect as a non-root user. See https://aka.ms/vscode-remote/containers/non-root. 24 | // "remoteUser": "node" 25 | } -------------------------------------------------------------------------------- /javascript/LUIS/sdk-3x/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "luis-sdk-3x", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "node index.js" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "@azure/cognitiveservices-luis-authoring": "^4.0.0-preview.3", 13 | "@azure/cognitiveservices-luis-runtime": "^5.0.0", 14 | "@azure/ms-rest-js": "^2.0.5" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /javascript/Personalizer/README.md: -------------------------------------------------------------------------------- 1 | # About this Quickstart 2 | 3 | This interactive sample takes the time of day and the users's taste preference as context, and sends it to an Azure Personalizer instance, which then returns the top personalized food choice, along with the recommendation probability distribution of each food item. The user then inputs whether or not Personalizer predicted correctly, which is data used to improve Personalizer's prediction model. 4 | 5 | # To try this sample 6 | 7 | ## Prerequisites 8 | 9 | The solution is a Node.js app, you will need [NPM](https://www.npmjs.com/) or an equivalent to install the packages and run the sample file. 10 | 11 | ## Set up the sample 12 | 13 | - Clone the Azure Personalizer Samples repo. 14 | 15 | ```bash 16 | git clone https://github.com/Azure-Samples/cognitive-services-quickstart-code 17 | ``` 18 | 19 | - Navigate to _javascript/Personalizer_. 20 | 21 | - Open `sample.js` for editing. 22 | 23 | ## Set up Azure Personalizer Service 24 | 25 | - Create a Personalizer instance in the Azure portal. 26 | 27 | - Set your environment variables `serviceKey` and `baseUri`. These values can be found in your Cognitive Services Quick start tab in the Azure portal. 28 | 29 | ## Run the sample 30 | 31 | Build and run the sample with `npm start` at the CLI or terminal. The app will take input from the user interactively and send the data to the Personalizer instance. 
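To see the shape of that Rank/Reward loop before opening `sample.js`, here is a minimal sketch of a single round with the `@azure/cognitiveservices-personalizer` SDK. It is illustrative only: the food actions, context features, and environment variable names below are examples rather than the exact values the sample uses.

```javascript
const { PersonalizerClient } = require("@azure/cognitiveservices-personalizer");
const { CognitiveServicesCredentials } = require("@azure/ms-rest-azure-js");
const { v4: uuidv4 } = require("uuid");

// Illustrative environment variable names; the sample's own key/endpoint setup may differ.
const serviceKey = process.env.PERSONALIZER_KEY;
const baseUri = process.env.PERSONALIZER_ENDPOINT;

const client = new PersonalizerClient(new CognitiveServicesCredentials(serviceKey), baseUri);

async function rankAndReward() {
  const rankRequest = {
    eventId: uuidv4(),
    // Context: what we know about the user right now.
    contextFeatures: [{ timeOfDay: "morning" }, { taste: "savory" }],
    // Actions: the food items Personalizer can choose from.
    actions: [
      { id: "pasta", features: [{ cuisine: "italian" }] },
      { id: "salad", features: [{ cuisine: "fresh" }] }
    ],
    excludedActions: []
  };

  // Rank: Personalizer returns the action it predicts this user will prefer.
  const rankResponse = await client.rank(rankRequest);
  console.log(`Personalizer suggests: ${rankResponse.rewardActionId}`);

  // Reward: send 1 if the suggestion was right, 0 if not, so the model can learn.
  await client.events.reward(rankRequest.eventId, { value: 1 });
}

rankAndReward().catch((err) => console.log(err));
```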
32 | -------------------------------------------------------------------------------- /javascript/Personalizer/multislot-quickstart/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "personalizer-multi-slot-quickstart", 3 | "version": "0.0.0", 4 | "description": "Sample code used in quickstart to demonstrate Personalizer's multi-slot learning loop of calling Rank and Reward", 5 | "main": "sample.js", 6 | "author": { 7 | "name": "" 8 | }, 9 | "dependencies": { 10 | "@azure/ms-rest-azure-js": "^2.1.0", 11 | "@azure/ms-rest-js": "^2.2.3", 12 | "readline-sync": "^1.4.10", 13 | "requests": "^0.3.0", 14 | "uuid": "^8.3.2" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /javascript/Personalizer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "personalizer-quickstart", 3 | "version": "1.0.0", 4 | "description": "Sample code used in quickstart to demonstrate Personalizer's learning loop of calling Rank and Reward", 5 | "main": "sample.js", 6 | "scripts": { 7 | "start": "node sample.js", 8 | "test": "echo \"Error: no test specified\" && exit 1" 9 | }, 10 | "keywords": [], 11 | "author": "", 12 | "license": "ISC", 13 | "dependencies": { 14 | "@azure/cognitiveservices-personalizer": "^2.0.1", 15 | "@azure/ms-rest-azure-js": "^2.0.1", 16 | "@azure/ms-rest-js": "^2.0.8", 17 | "readline-sync": "^1.4.10", 18 | "uuid": "^8.3.0" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /javascript/QnAMaker/rest-based-quickstart/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "qnamker-rest-based-quickstart", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "rest-based-quickstart.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC", 12 | "dependencies": { 13 | "request": "^2.88.0", 14 | "requestretry": "^4.0.2" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /javascript/QnAMaker/rest/publish-kb.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /* To install dependencies, run: 4 | * npm install requestretry 5 | */ 6 | const request = require("requestretry"); 7 | 8 | /* 9 | * Set the `subscriptionKey` and `endpoint` variables to your 10 | * QnA Maker authoring subscription key and endpoint. 11 | * 12 | * These values can be found in the Azure portal (ms.portal.azure.com/). 13 | * Look up your QnA Maker resource. Then, in the "Resource management" 14 | * section, find the "Keys and Endpoint" page. 15 | * 16 | * The value of `endpoint` has the format https://YOUR-RESOURCE-NAME.cognitiveservices.azure.com. 17 | * 18 | * Set the `kbId` variable to the ID of a knowledge base you have 19 | * previously created. 
20 | */ 21 | const subscriptionKey = "PASTE_YOUR_QNA_MAKER_AUTHORING_SUBSCRIPTION_KEY_HERE"; 22 | const endpoint = "PASTE_YOUR_QNA_MAKER_AUTHORING_ENDPOINT_HERE"; 23 | const kbId = "PASTE_YOUR_QNA_MAKER_KB_ID_HERE"; 24 | 25 | const publishKbMethod = "/qnamaker/v4.0/knowledgebases/" + kbId 26 | 27 | const publishKb = async () => { 28 | var request_params = { 29 | uri: endpoint + publishKbMethod, 30 | method: 'POST', 31 | headers: { 32 | 'Ocp-Apim-Subscription-Key': subscriptionKey 33 | } 34 | }; 35 | var response = await request(request_params); 36 | return response; 37 | }; 38 | 39 | publishKb() 40 | .then(response => { 41 | // Note status code 204 is success. 42 | console.log("Result: " + response.statusCode); 43 | }).catch(err => { 44 | console.log(err); 45 | }) 46 | -------------------------------------------------------------------------------- /javascript/QnAMaker/sdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "qnamaker_quickstart-sdk", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "qnamaker_quickstart.js", 6 | "scripts": { 7 | "start": "node qnamaker_quickstart.js", 8 | "test": "echo \"Error: no test specified\" && exit 1" 9 | }, 10 | "keywords": [], 11 | "author": "", 12 | "license": "ISC", 13 | "dependencies": { 14 | "@azure/cognitiveservices-qnamaker": "^2.0.0", 15 | "@azure/cognitiveservices-qnamaker-runtime": "^1.0.0", 16 | "@azure/ms-rest-js": "^2.0.4" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /javascript/QnAMaker/sdk/readme.md: -------------------------------------------------------------------------------- 1 | # Create, update, publish, delete a knowledge base 2 | 3 | ## To use this sample 4 | 5 | 1. Create QnA Maker resource in Azure portal. 6 | 1. Get resource's key and resource name. 7 | 1. Edit values for your key and resource name. 8 | 1. Install dependencies with `npm install`. 9 | 1. Run sample with `npm start`. 10 | 11 | # Install 12 | 13 | ```javascript 14 | npm install 15 | ``` 16 | 17 | ## Run 18 | 19 | ```javascript 20 | npm start 21 | ``` 22 | -------------------------------------------------------------------------------- /javascript/TextAnalytics/REST/rest-api-analyze-sentiment.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | let https = require ('https'); 4 | 5 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE"; 6 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE"; 7 | 8 | let path = '/text/analytics/v3.0/sentiment'; 9 | 10 | let response_handler = function (response) { 11 | let body = ''; 12 | response.on('data', function (d) { 13 | body += d; 14 | }); 15 | response.on('end', function () { 16 | let body_ = JSON.parse(body); 17 | let body__ = JSON.stringify(body_, null, ' '); 18 | console.log(body__); 19 | }); 20 | response.on('error', function (e) { 21 | console.log('Error: ' + e.message); 22 | }); 23 | }; 24 | 25 | let get_sentiments = function (documents) { 26 | let body = JSON.stringify(documents); 27 | 28 | let request_params = { 29 | method: 'POST', 30 | hostname: (new URL(endpoint)).hostname, 31 | path: path, 32 | headers: { 33 | 'Ocp-Apim-Subscription-Key': subscription_key, 34 | } 35 | }; 36 | 37 | let req = https.request(request_params, response_handler); 38 | req.write(body); 39 | req.end(); 40 | } 41 | 42 | let documents = { 43 | 'documents': [ 44 | { 'id': '1', 'language': 'en', 'text': 'I really enjoy the new XBox One S. 
It has a clean look, it has 4K/HDR resolution and it is affordable.' }, 45 | { 'id': '2', 'language': 'es', 'text': 'Este ha sido un dia terrible, llegué tarde al trabajo debido a un accidente automobilistico.' }, 46 | ] 47 | }; 48 | 49 | get_sentiments(documents); -------------------------------------------------------------------------------- /javascript/TextAnalytics/REST/rest-api-detect-language.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | let https = require ('https'); 4 | 5 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE"; 6 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE"; 7 | 8 | let path = '/text/analytics/v3.0/languages'; 9 | 10 | let response_handler = function (response) { 11 | let body = ''; 12 | response.on('data', function (d) { 13 | body += d; 14 | }); 15 | response.on('end', function () { 16 | let body_ = JSON.parse(body); 17 | let body__ = JSON.stringify(body_, null, ' '); 18 | console.log(body__); 19 | }); 20 | response.on('error', function (e) { 21 | console.log('Error: ' + e.message); 22 | }); 23 | }; 24 | 25 | let get_language = function (documents) { 26 | let body = JSON.stringify(documents); 27 | 28 | let request_params = { 29 | method: 'POST', 30 | hostname: (new URL(endpoint)).hostname, 31 | path: path, 32 | headers: { 33 | 'Ocp-Apim-Subscription-Key': subscription_key, 34 | } 35 | }; 36 | 37 | let req = https.request(request_params, response_handler); 38 | req.write(body); 39 | req.end(); 40 | } 41 | 42 | let documents = { 43 | 'documents': [ 44 | { 'id': '1', 'text': 'This is a document written in English.' }, 45 | { 'id': '2', 'text': 'Este es un document escrito en Español.' }, 46 | { 'id': '3', 'text': '这是一个用中文写的文件' } 47 | ] 48 | }; 49 | 50 | get_language(documents); -------------------------------------------------------------------------------- /javascript/TextAnalytics/REST/rest-api-entity-linking.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | let https = require ('https'); 4 | 5 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE"; 6 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE"; 7 | 8 | let path = '/text/analytics/v3.0/entities/recognition/general'; 9 | 10 | let response_handler = function (response) { 11 | let body = ''; 12 | response.on('data', function (d) { 13 | body += d; 14 | }); 15 | response.on('end', function () { 16 | let body_ = JSON.parse(body); 17 | let body__ = JSON.stringify(body_, null, ' '); 18 | console.log(body__); 19 | }); 20 | response.on('error', function (e) { 21 | console.log('Error: ' + e.message); 22 | }); 23 | }; 24 | 25 | let get_entities = function (documents) { 26 | let body = JSON.stringify(documents); 27 | 28 | let request_params = { 29 | method: 'POST', 30 | hostname: (new URL(endpoint)).hostname, 31 | path: path, 32 | headers: { 33 | 'Ocp-Apim-Subscription-Key': subscription_key, 34 | } 35 | }; 36 | 37 | let req = https.request(request_params, response_handler); 38 | req.write(body); 39 | req.end(); 40 | } 41 | 42 | let documents = { 43 | 'documents': [ 44 | { 'id': '1', 'language': 'en', 'text': 'Microsoft is an It company.' 
} 45 | ] 46 | }; 47 | 48 | get_entities(documents); -------------------------------------------------------------------------------- /javascript/TextAnalytics/REST/rest-api-extract-key-phrases.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | let https = require ('https'); 4 | 5 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE"; 6 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE"; 7 | 8 | let path = '/text/analytics/v3.0/keyPhrases'; 9 | 10 | let response_handler = function (response) { 11 | let body = ''; 12 | response.on('data', function (d) { 13 | body += d; 14 | }); 15 | response.on('end', function () { 16 | let body_ = JSON.parse(body); 17 | let body__ = JSON.stringify(body_, null, ' '); 18 | console.log(body__); 19 | }); 20 | response.on('error', function (e) { 21 | console.log('Error: ' + e.message); 22 | }); 23 | }; 24 | 25 | let get_key_phrases = function (documents) { 26 | let body = JSON.stringify(documents); 27 | 28 | let request_params = { 29 | method: 'POST', 30 | hostname: (new URL(endpoint)).hostname, 31 | path: path, 32 | headers: { 33 | 'Ocp-Apim-Subscription-Key': subscription_key, 34 | } 35 | }; 36 | 37 | let req = https.request(request_params, response_handler); 38 | req.write(body); 39 | req.end(); 40 | } 41 | 42 | let documents = { 43 | 'documents': [ 44 | { 'id': '1', 'language': 'en', 'text': 'I really enjoy the new XBox One S. It has a clean look, it has 4K/HDR resolution and it is affordable.' }, 45 | { 'id': '2', 'language': 'es', 'text': 'Si usted quiere comunicarse con Carlos, usted debe de llamarlo a su telefono movil. Carlos es muy responsable, pero necesita recibir una notificacion si hay algun problema.' }, 46 | { 'id': '3', 'language': 'en', 'text': 'The Grand Hotel is a new hotel in the center of Seattle. It earned 5 stars in my review, and has the classiest decor I\'ve ever seen.' } 47 | ] 48 | }; 49 | 50 | get_key_phrases(documents); -------------------------------------------------------------------------------- /javascript/TranslatorText/BreakSentence.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 4 | * npm install @azure/cognitiveservices-autosuggest 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var TranslatorText = require("@azure/cognitiveservices-translatortext"); 8 | var msRest = require("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_TRANSLATOR_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_TRANSLATOR_ENDPOINT_HERE'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new TranslatorText.TranslatorTextClient(creds, endpoint).translator; 15 | 16 | async function quickstart() { 17 | var result = await client.breakSentence([{ text: "How are you? I am fine. What did you do today?" 
}]); 18 | 19 | if (0 == result.length) { 20 | console.log ('No sentences found.'); 21 | } 22 | else { 23 | var sentLen = result[0].sentLen; 24 | console.log ('Sentences found: ' + sentLen.length); 25 | for (var i = 0; i < sentLen.length; i++) { 26 | console.log ('Length of sentence ' + (i + 1) + ': ' + sentLen[i]); 27 | } 28 | } 29 | } 30 | 31 | try { 32 | quickstart(); 33 | } 34 | catch (error) { 35 | console.log(error); 36 | } 37 | -------------------------------------------------------------------------------- /javascript/TranslatorText/Detect.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 4 | * npm install @azure/cognitiveservices-autosuggest 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var TranslatorText = require("@azure/cognitiveservices-translatortext"); 8 | var msRest = require("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_TRANSLATOR_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_TRANSLATOR_ENDPOINT_HERE'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new TranslatorText.TranslatorTextClient(creds, endpoint).translator; 15 | 16 | async function quickstart() { 17 | var result = await client.detect([{ text: "Salve, mondo!" }]); 18 | 19 | if (0 == result.length) { 20 | console.log ('No results found.'); 21 | } 22 | else { 23 | console.log ('Language detected: ' + result[0].language); 24 | console.log ('Score: ' + result[0].score); 25 | } 26 | } 27 | 28 | try { 29 | quickstart(); 30 | } 31 | catch (error) { 32 | console.log(error); 33 | } 34 | -------------------------------------------------------------------------------- /javascript/TranslatorText/DictionaryExamples.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 
4 | * npm install @azure/cognitiveservices-autosuggest 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var TranslatorText = require("@azure/cognitiveservices-translatortext"); 8 | var msRest = require("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_TRANSLATOR_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_TRANSLATOR_ENDPOINT_HERE'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new TranslatorText.TranslatorTextClient(creds, endpoint).translator; 15 | 16 | async function quickstart() { 17 | var result = await client.dictionaryExamples('en', 'fr', [{ text: "great", translation: "formidable" }]); 18 | 19 | if (0 == result.length) { 20 | console.log ('No examples found.'); 21 | } 22 | else { 23 | for (var i = 0; i < result[0].examples.length; i++) { 24 | console.log ('Example ' + (i + 1) + ':'); 25 | console.log ('Source prefix: ' + result[0].examples[i].sourcePrefix); 26 | console.log ('Source term: ' + result[0].examples[i].sourceTerm); 27 | console.log ('Source suffix: ' + result[0].examples[i].sourceSuffix); 28 | console.log ('Target prefix: ' + result[0].examples[i].targetPrefix); 29 | console.log ('Target term: ' + result[0].examples[i].targetTerm); 30 | console.log ('Target suffix: ' + result[0].examples[i].targetSuffix); 31 | console.log (); 32 | } 33 | } 34 | } 35 | 36 | try { 37 | quickstart(); 38 | } 39 | catch (error) { 40 | console.log(error); 41 | } 42 | -------------------------------------------------------------------------------- /javascript/TranslatorText/DictionaryLookup.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 4 | * npm install @azure/cognitiveservices-autosuggest 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var TranslatorText = require("@azure/cognitiveservices-translatortext"); 8 | var msRest = require("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_TRANSLATOR_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_TRANSLATOR_ENDPOINT_HERE'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new TranslatorText.TranslatorTextClient(creds, endpoint).translator; 15 | 16 | async function quickstart() { 17 | var result = await client.dictionaryLookup('en', 'fr', [{ text: "great" }]); 18 | 19 | if (0 == result.length) { 20 | console.log ('No translations found.'); 21 | } 22 | else { 23 | console.log ('Translations:'); 24 | for (var i = 0; i < result[0].translations.length; i++) { 25 | console.log (result[0].translations[i].displayTarget); 26 | } 27 | } 28 | } 29 | 30 | try { 31 | quickstart(); 32 | } 33 | catch (error) { 34 | console.log(error); 35 | } 36 | -------------------------------------------------------------------------------- /javascript/TranslatorText/Languages.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 
4 | * npm install @azure/cognitiveservices-autosuggest 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var TranslatorText = require("@azure/cognitiveservices-translatortext"); 8 | var msRest = require("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_TRANSLATOR_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_TRANSLATOR_ENDPOINT_HERE'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new TranslatorText.TranslatorTextClient(creds, endpoint).translator; 15 | 16 | async function quickstart() { 17 | var result = await client.languages (); 18 | console.log ('Languages available:'); 19 | Object.keys(result.translation).forEach(function(key) { 20 | console.log (key + '\t' + result.translation[key].name); 21 | }); 22 | } 23 | 24 | try { 25 | quickstart(); 26 | } 27 | catch (error) { 28 | console.log(error); 29 | } 30 | -------------------------------------------------------------------------------- /javascript/TranslatorText/Translate.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 4 | * npm install @azure/cognitiveservices-autosuggest 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var TranslatorText = require("@azure/cognitiveservices-translatortext"); 8 | var msRest = require("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_TRANSLATOR_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_TRANSLATOR_ENDPOINT_HERE'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new TranslatorText.TranslatorTextClient(creds, endpoint).translator; 15 | 16 | async function quickstart() { 17 | var result = await client.translate(['it', 'de'], [{ text: "How are you? I am fine. What did you do today?" }]); 18 | 19 | if (0 == result.length) { 20 | console.log ('No translations found.'); 21 | } 22 | else { 23 | var translations = result[0].translations; 24 | console.log ('Translations:'); 25 | for (var i = 0; i < translations.length; i++) { 26 | console.log (translations[i].to + ': ' + translations[i].text); 27 | } 28 | } 29 | } 30 | 31 | try { 32 | quickstart(); 33 | } 34 | catch (error) { 35 | console.log(error); 36 | } 37 | -------------------------------------------------------------------------------- /javascript/TranslatorText/Transliterate.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /* To run this sample, install the following modules. 
4 | * npm install @azure/cognitiveservices-autosuggest 5 | * npm install @azure/ms-rest-js 6 | */ 7 | var TranslatorText = require("@azure/cognitiveservices-translatortext"); 8 | var msRest = require("@azure/ms-rest-js"); 9 | 10 | const subscription_key = 'PASTE_YOUR_TRANSLATOR_SUBSCRIPTION_KEY_HERE'; 11 | const endpoint = 'PASTE_YOUR_TRANSLATOR_ENDPOINT_HERE'; 12 | 13 | const creds = new msRest.ApiKeyCredentials({ inHeader: { 'Ocp-Apim-Subscription-Key': subscription_key } }); 14 | const client = new TranslatorText.TranslatorTextClient(creds, endpoint).translator; 15 | 16 | async function quickstart() { 17 | var result = await client.transliterate('ja', 'jpan', 'latn', [{ text: 'こんにちは' }]); 18 | 19 | if (0 == result.length) { 20 | console.log ('No transliterations found.'); 21 | } 22 | else { 23 | console.log (result[0].text); 24 | } 25 | } 26 | 27 | try { 28 | quickstart(); 29 | } 30 | catch (error) { 31 | console.log(error); 32 | } 33 | -------------------------------------------------------------------------------- /php/ComputerVision/analyze-image/analyze-image.php: -------------------------------------------------------------------------------- 1 | 2 | 3 | Analyze Image Sample 4 | 5 | 6 | with a valid key. 8 | $ocpApimkey = 'PASTE_YOUR_COMPUTER_VISION_KEY_HERE'; 9 | $uriBase = 'PASTE_YOUR_COMPUTER_VISION_ENDPOINT_HERE' + 'vision/v3.1/'; 10 | 11 | $imageUrl = 'https://upload.wikimedia.org/wikipedia/commons/3/3c/Shaki_waterfall.jpg'; 12 | 13 | require_once 'HTTP/Request2.php'; 14 | 15 | $request = new Http_Request2($uriBase . '/analyze'); 16 | $url = $request->getUrl(); 17 | 18 | $headers = array( 19 | // Request headers 20 | 'Content-Type' => 'application/json', 21 | 'Ocp-Apim-Subscription-Key' => $ocpApimkey 22 | ); 23 | $request->setHeader($headers); 24 | 25 | $parameters = array( 26 | // Request parameters 27 | 'visualFeatures' => 'Categories,Description', 28 | 'details' => '', 29 | 'language' => 'en' 30 | ); 31 | $url->setQueryVariables($parameters); 32 | 33 | $request->setMethod(HTTP_Request2::METHOD_POST); 34 | 35 | // Request body parameters 36 | $body = json_encode(array('url' => $imageUrl)); 37 | 38 | // Request body 39 | $request->setBody($body); 40 | 41 | try 42 | { 43 | $response = $request->send(); 44 | echo "
" .
45 |         json_encode(json_decode($response->getBody()), JSON_PRETTY_PRINT) . "
"; 46 | } 47 | catch (HttpException $ex) 48 | { 49 | echo "
" . $ex . "
"; 50 | } 51 | ?> 52 | 53 | -------------------------------------------------------------------------------- /php/ComputerVision/extract-printed-text/get-printed-text.php: -------------------------------------------------------------------------------- 1 | 3 | 4 | OCR Sample 5 | 6 | 7 | with a valid key. 9 | $ocpApimkey = 'PASTE_YOUR_COMPUTER_VISION_KEY_HERE'; 10 | $uriBase = 'PASTE_YOUR_COMPUTER_VISION_ENDPOINT_HERE' + 'vision/v3.1/'; 11 | 12 | $imageUrl = 'https://upload.wikimedia.org/wikipedia/commons/thumb/a/af/' . 13 | 'Atomist_quote_from_Democritus.png/338px-Atomist_quote_from_Democritus.png'; 14 | 15 | require_once 'HTTP/Request2.php'; 16 | 17 | $request = new Http_Request2($uriBase . 'ocr'); 18 | $url = $request->getUrl(); 19 | 20 | $headers = array( 21 | // Request headers 22 | 'Content-Type' => 'application/json', 23 | 'Ocp-Apim-Subscription-Key' => $ocpApimkey 24 | ); 25 | $request->setHeader($headers); 26 | 27 | $parameters = array( 28 | // Request parameters 29 | 'language' => 'unk', 30 | 'detectOrientation' => 'true' 31 | ); 32 | $url->setQueryVariables($parameters); 33 | 34 | $request->setMethod(HTTP_Request2::METHOD_POST); 35 | 36 | // Request body parameters 37 | $body = json_encode(array('url' => $imageUrl)); 38 | 39 | // Request body 40 | $request->setBody($body); 41 | 42 | try 43 | { 44 | $response = $request->send(); 45 | echo "
" .
46 |         json_encode(json_decode($response->getBody()), JSON_PRETTY_PRINT) . "
"; 47 | } 48 | catch (HttpException $ex) 49 | { 50 | echo "
" . $ex . "
"; 51 | } 52 | ?> 53 | 54 | -------------------------------------------------------------------------------- /php/ComputerVision/generate-thumbnail/get-thumbnail.php: -------------------------------------------------------------------------------- 1 | 2 | 3 | Get Thumbnail Sample 4 | 5 | 6 | with a valid key. 8 | $ocpApimkey = 'PASTE_YOUR_COMPUTER_VISION_KEY_HERE'; 9 | $uriBase = 'PASTE_YOUR_COMPUTER_VISION_ENDPOINT_HERE' + 'vision/v3.1/'; 10 | 11 | $imageUrl = 12 | 'https://upload.wikimedia.org/wikipedia/commons/9/94/Bloodhound_Puppy.jpg'; 13 | 14 | require_once 'HTTP/Request2.php'; 15 | 16 | $request = new Http_Request2($uriBase . 'generateThumbnail'); 17 | $url = $request->getUrl(); 18 | 19 | $headers = array( 20 | // Request headers 21 | 'Content-Type' => 'application/json', 22 | 'Ocp-Apim-Subscription-Key' => $ocpApimkey 23 | ); 24 | $request->setHeader($headers); 25 | 26 | $parameters = array( 27 | // Request parameters 28 | 'width' => '100', // Width of the thumbnail. 29 | 'height' => '100', // Height of the thumbnail. 30 | 'smartCropping' => 'true', 31 | ); 32 | $url->setQueryVariables($parameters); 33 | 34 | $request->setMethod(HTTP_Request2::METHOD_POST); 35 | 36 | // Request body parameters 37 | $body = json_encode(array('url' => $imageUrl)); 38 | 39 | // Request body 40 | $request->setBody($body); 41 | 42 | try 43 | { 44 | $response = $request->send(); 45 | echo "
" .
46 |         json_encode(json_decode($response->getBody()), JSON_PRETTY_PRINT) . "
"; 47 | } 48 | catch (HttpException $ex) 49 | { 50 | echo "
" . $ex . "
"; 51 | } 52 | ?> 53 | 54 | -------------------------------------------------------------------------------- /php/ComputerVision/use-domain-model/use-domain-model.php: -------------------------------------------------------------------------------- 1 | 2 | 3 | Analyze Domain Model Sample 4 | 5 | 6 | with a valid key. 8 | $ocpApimkey = 'PASTE_YOUR_COMPUTER_VISION_KEY_HERE'; 9 | $uriBase = 'PASTE_YOUR_COMPUTER_VISION_ENDPOINT_HERE' + 'vision/v3.1/'; 10 | 11 | // Change 'landmarks' to 'celebrities' to use the Celebrities model. 12 | $domain = 'landmarks'; 13 | 14 | $imageUrl = 15 | 'https://upload.wikimedia.org/wikipedia/commons/2/23/Space_Needle_2011-07-04.jpg'; 16 | 17 | require_once 'HTTP/Request2.php'; 18 | 19 | $request = new Http_Request2($uriBase . 'models/' . $domain . '/analyze'); 20 | $url = $request->getUrl(); 21 | 22 | $headers = array( 23 | // Request headers 24 | 'Content-Type' => 'application/json', 25 | 'Ocp-Apim-Subscription-Key' => $ocpApimkey 26 | ); 27 | $request->setHeader($headers); 28 | 29 | $parameters = array( 30 | // Request parameters 31 | 'model' => $domain 32 | ); 33 | $url->setQueryVariables($parameters); 34 | 35 | $request->setMethod(HTTP_Request2::METHOD_POST); 36 | 37 | // Request body parameters 38 | $body = json_encode(array('url' => $imageUrl)); 39 | 40 | // Request body 41 | $request->setBody($body); 42 | 43 | try 44 | { 45 | $response = $request->send(); 46 | echo "
" .
47 |         json_encode(json_decode($response->getBody()), JSON_PRETTY_PRINT) . "
"; 48 | } 49 | catch (HttpException $ex) 50 | { 51 | echo "
" . $ex . "
"; 52 | } 53 | ?> 54 | 55 | -------------------------------------------------------------------------------- /php/TextAnalytics/REST/AnalyzeSentiment.php: -------------------------------------------------------------------------------- 1 | &$value) { 16 | $value['text'] = utf8_encode($value['text']); 17 | } 18 | } 19 | 20 | $data = json_encode ($data); 21 | 22 | $headers = "Content-type: text/json\r\n" . 23 | "Content-Length: " . strlen($data) . "\r\n" . 24 | "Ocp-Apim-Subscription-Key: $key\r\n"; 25 | 26 | // NOTE: Use the key 'http' even if you are making an HTTPS request. See: 27 | // https://php.net/manual/en/function.stream-context-create.php 28 | $options = array ( 29 | 'http' => array ( 30 | 'header' => $headers, 31 | 'method' => 'POST', 32 | 'content' => $data 33 | ) 34 | ); 35 | $context = stream_context_create ($options); 36 | $result = file_get_contents ($host . $path, false, $context); 37 | return $result; 38 | } 39 | 40 | $data = array ( 41 | 'documents' => array ( 42 | array ( 'id' => '1', 'language' => 'en', 'text' => 'I really enjoy the new XBox One S. It has a clean look, it has 4K/HDR resolution and it is affordable.' ), 43 | array ( 'id' => '2', 'language' => 'es', 'text' => 'Este ha sido un dia terrible, llegué tarde al trabajo debido a un accidente automobilistico.' ) 44 | ) 45 | ); 46 | 47 | print "Please wait a moment for the results to appear."; 48 | 49 | $result = GetSentiment($endpoint, $path, $subscription_key, $data); 50 | 51 | echo json_encode (json_decode ($result), JSON_PRETTY_PRINT); 52 | ?> -------------------------------------------------------------------------------- /php/TextAnalytics/REST/DetectLanguage.php: -------------------------------------------------------------------------------- 1 | array ( 24 | 'header' => $headers, 25 | 'method' => 'POST', 26 | 'content' => $data 27 | ) 28 | ); 29 | $context = stream_context_create ($options); 30 | $result = file_get_contents ($host . $path, false, $context); 31 | return $result; 32 | } 33 | 34 | $data = array ( 35 | 'documents' => array ( 36 | array ( 'id' => '1', 'text' => 'This is a document written in English.' ), 37 | array ( 'id' => '2', 'text' => 'Este es un document escrito en Español.' ), 38 | array ( 'id' => '3', 'text' => '这是一个用中文写的文件') 39 | ) 40 | ); 41 | 42 | print "Please wait a moment for the results to appear."; 43 | 44 | $result = DetectLanguage ($endpoint, $path, $subscription_key, $data); 45 | 46 | echo json_encode (json_decode ($result), JSON_PRETTY_PRINT); 47 | ?> -------------------------------------------------------------------------------- /php/TextAnalytics/REST/GetKeyPhrases.php: -------------------------------------------------------------------------------- 1 | array ( 23 | 'header' => $headers, 24 | 'method' => 'POST', 25 | 'content' => $data 26 | ) 27 | ); 28 | $context = stream_context_create ($options); 29 | $result = file_get_contents ($host . $path, false, $context); 30 | return $result; 31 | } 32 | 33 | $data = array ( 34 | 'documents' => array ( 35 | array ( 'id' => '1', 'language' => 'en', 'text' => 'I really enjoy the new XBox One S. It has a clean look, it has 4K/HDR resolution and it is affordable.' ), 36 | array ( 'id' => '2', 'language' => 'es', 'text' => 'Si usted quiere comunicarse con Carlos, usted debe de llamarlo a su telefono movil. Carlos es muy responsable, pero necesita recibir una notificacion si hay algun problema.' ), 37 | array ( 'id' => '3', 'language' => 'en', 'text' => 'The Grand Hotel is a new hotel in the center of Seattle. 
It earned 5 stars in my review, and has the classiest decor I\'ve ever seen.' ) 38 | ) 39 | ); 40 | 41 | print "Please wait a moment for the results to appear."; 42 | 43 | $result = GetKeyPhrases($endpoint, $path, $subscription_key, $data); 44 | 45 | echo json_encode (json_decode ($result), JSON_PRETTY_PRINT); 46 | ?> -------------------------------------------------------------------------------- /php/TextAnalytics/REST/IdentifyEntities.php: -------------------------------------------------------------------------------- 1 | array ( 23 | 'header' => $headers, 24 | 'method' => 'POST', 25 | 'content' => $data 26 | ) 27 | ); 28 | $context = stream_context_create ($options); 29 | $result = file_get_contents ($host . $path, false, $context); 30 | return $result; 31 | } 32 | 33 | $data = array ( 34 | 'documents' => array ( 35 | array ( 'id' => '1', 'language' => 'en', 'text' => 'Microsoft is and It company.' ), 36 | ) 37 | ); 38 | 39 | print "Please wait a moment for the results to appear."; 40 | 41 | $result = GetEntities($endpoint, $path, $subscription_key, $data); 42 | 43 | echo json_encode (json_decode ($result), JSON_PRETTY_PRINT); 44 | ?> -------------------------------------------------------------------------------- /php/face/rest/detect.php: -------------------------------------------------------------------------------- 1 | getUrl(); 14 | 15 | $headers = array( 16 | // Request headers 17 | 'Content-Type' => 'application/json', 18 | 'Ocp-Apim-Subscription-Key' => $ocpApimSubscriptionKey 19 | ); 20 | $request->setHeader($headers); 21 | 22 | $parameters = array( 23 | // Request parameters 24 | 'detectionModel' => 'detection_03', 25 | 'returnFaceId' => 'true'); 26 | $url->setQueryVariables($parameters); 27 | 28 | $request->setMethod(HTTP_Request2::METHOD_POST); 29 | 30 | // Request body parameters 31 | $body = json_encode(array('url' => $imageUrl)); 32 | 33 | // Request body 34 | $request->setBody($body); 35 | 36 | try 37 | { 38 | $response = $request->send(); 39 | echo "
" .
40 |         json_encode(json_decode($response->getBody()), JSON_PRETTY_PRINT) . "
"; 41 | } 42 | catch (HttpException $ex) 43 | { 44 | echo "
" . $ex . "
"; 45 | } 46 | ?> 47 | -------------------------------------------------------------------------------- /powershell/management.ps1: -------------------------------------------------------------------------------- 1 | # To install Azure PowerShell, see: 2 | # https://github.com/Azure/azure-powershell 3 | 4 | # Note There are 32-bit and 64-bit versions of Powershell. 5 | # In Windows 10, the "Windows Powershell" shortcut runs the 64-bit version of Powershell, and the "Windows Powershell (x86)" shortcut runs the 32-bit version of Powershell. 6 | # Powershell modules for the 32-bit version of Powershell are stored in C:\Program Files (x86)\WindowsPowerShell\Modules\. 7 | # Powershell modules for the 64-bit version of Powershell are stored in C:\Program Files\WindowsPowerShell\Modules\. 8 | # If you install the Azure Powershell commandlets using the 32-bit version of Powershell, you will not be able to access them from the 64-bit version of Powershell. 9 | # Likewise, if you install the Azure Powershell commandlets using the 64-bit version of Powershell, you will not be able to access them from the 32-bit version of Powershell. 10 | 11 | # The name of the Azure resource group in which you want to create the resource. 12 | # You can find resource groups in the Azure Dashboard under Home > Resource groups. 13 | $resource_group_name = "TODO_REPLACE"; 14 | 15 | echo "Connecting to Azure account." 16 | Connect-AzAccount 17 | 18 | # For more information see: 19 | # https://docs.microsoft.com/en-us/powershell/module/az.cognitiveservices/get-azcognitiveservicesaccount?view=azps-3.3.0 20 | echo "Resources in resource group $resource_group_name:" 21 | Get-AzCognitiveServicesAccount -ResourceGroupName $resource_group_name 22 | 23 | # Uncomment this to list all available resource kinds, SKUs, and locations for your Azure account. 24 | # For more information see: 25 | # https://docs.microsoft.com/en-us/powershell/module/az.cognitiveservices/get-azcognitiveservicesaccountsku?view=azps-3.3.0 26 | #Get-AzCognitiveServicesAccountSku 27 | 28 | # For more information see: 29 | # https://docs.microsoft.com/en-us/powershell/module/az.cognitiveservices/new-azcognitiveservicesaccount?view=azps-3.3.0 30 | echo "Creating a resource with kind Text Translation, SKU F0 (free tier), location global." 31 | $result = New-AzCognitiveServicesAccount -ResourceGroupName $resource_group_name -Name "test_resource" -Type "TextTranslation" -SkuName "F0" -Location "Global" 32 | echo "Result:" 33 | echo $result 34 | 35 | # For more information see: 36 | # https://docs.microsoft.com/en-us/powershell/module/az.cognitiveservices/remove-azcognitiveservicesaccount?view=azps-3.3.0 37 | echo "Removing resource." 
38 | Remove-AzCognitiveServicesAccount -ResourceGroupName $resource_group_name -Name "test_resource" 39 | -------------------------------------------------------------------------------- /python/AutoSuggest/Quickstart.py: -------------------------------------------------------------------------------- 1 | import json, os, sys 2 | 3 | from azure.cognitiveservices.search.autosuggest import AutoSuggestClient 4 | from msrest.authentication import CognitiveServicesCredentials 5 | 6 | ''' 7 | Microsoft Azure Cognitive Services Bing Autosuggest - Get Search Suggestions 8 | 9 | Install the Bing Autosuggest module: 10 | python -m pip install azure-cognitiveservices-search_autosuggest 11 | ''' 12 | 13 | subscription_key = "PASTE_YOUR_AUTO_SUGGEST_SUBSCRIPTION_KEY_HERE" 14 | endpoint = "PASTE_YOUR_AUTO_SUGGEST_ENDPOINT_HERE" 15 | 16 | # Instantiate a Bing Autosuggest client 17 | client = AutoSuggestClient(endpoint, CognitiveServicesCredentials(subscription_key)) 18 | 19 | # Returns from the Suggestions class 20 | result = client.auto_suggest('sail') 21 | 22 | # Access all suggestions 23 | suggestions = result.suggestion_groups[0] 24 | 25 | # print results 26 | for suggestion in suggestions.search_suggestions: 27 | print(suggestion.query) 28 | print(suggestion.display_text) 29 | -------------------------------------------------------------------------------- /python/BingSpellCheck/bing_spell_check_quickstart.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from azure.cognitiveservices.language.spellcheck import SpellCheckClient 4 | from msrest.authentication import CognitiveServicesCredentials 5 | 6 | ''' 7 | This Bing Spell Check quickstart checks some misspelled words and suggests corrections. 8 | 9 | Prerequisites: 10 | - Add your Bing Spell Check subscription key to your environment variables, using 11 | BING_SPELL_CHECK_SUBSCRIPTION_KEY as a variable name. 12 | - Install the following modules: 13 | pip install azure.cognitiveservices.language.spellcheck 14 | pip install msrest 15 | 16 | Python SDK: https://docs.microsoft.com/en-us/python/api/overview/azure/cognitiveservices/spellcheck?view=azure-python 17 | ''' 18 | 19 | SUBSCRIPTION_KEY = 'PASTE_YOUR_SPELL_CHECK_SUBSCRIPTION_KEY_HERE' 20 | ENDPOINT = 'PASTE_YOUR_SPELL_CHECK_ENDPOINT_HERE' 21 | 22 | # Create a client 23 | client = SpellCheckClient(ENDPOINT, CognitiveServicesCredentials(SUBSCRIPTION_KEY)) 24 | 25 | try: 26 | # Original query 27 | query = 'bill gtaes was ehre toody' 28 | print('Original query:\n', query) 29 | print() 30 | # Check the query for misspellings 31 | # mode can be 'proof' or 'spell' 32 | result = client.spell_checker(query, mode='proof') 33 | 34 | # Print the suggested corrections 35 | print('Suggested correction:') 36 | for token in result.flagged_tokens: 37 | for suggestion_object in token.suggestions: 38 | print(suggestion_object.suggestion) 39 | 40 | except Exception as err: 41 | print("Encountered exception. 
{}".format(err)) 42 | -------------------------------------------------------------------------------- /python/ComputerVision/4-0/sample.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/python/ComputerVision/4-0/sample.jpg -------------------------------------------------------------------------------- /python/Face/cognsvcsdk/README.md: -------------------------------------------------------------------------------- 1 | # Face quickstart and examples 2 | 3 | This quickstart and example use the [Face SDK](https://docs.microsoft.com/en-us/python/api/azure-cognitiveservices-vision-face/?view=azure-python) for Cognitive Services. 4 | 5 | ## Prerequisites 6 | - Python 3+ 7 | - Install Face SDK: pip install azure-cognitiveservices-vision-face 8 | 9 | ### Images 10 | - To your root folder, add all images downloaded mentioned in the quickstart. 11 | - For the DetectIdentifyFace example, download all "woman" and "child" images. 12 | 13 | https://github.com/Azure-examples/cognitive-services-sample-data-files/tree/master/Face/images 14 | 15 | ## Run 16 | - Run from your favorite IDE, or using this from the command line: 17 | 18 | python FaceQuickstart.py 19 | python DetectIdentifyFace.py 20 | 21 | ## References 22 | - Documentation: https://docs.microsoft.com/en-us/azure/cognitive-services/face/ 23 | - SDK: https://docs.microsoft.com/en-us/python/api/azure-cognitiveservices-vision-face/azure.cognitiveservices.vision.face?view=azure-python 24 | - All Face APIs: https://docs.microsoft.com/en-us/azure/cognitive-services/face/APIReference 25 | -------------------------------------------------------------------------------- /python/Face/rest/detect.py: -------------------------------------------------------------------------------- 1 | import json, os, requests 2 | 3 | subscription_key = "PASTE_YOUR_FACE_SUBSCRIPTION_KEY_HERE" 4 | 5 | face_api_url = "PASTE_YOUR_FACE_ENDPOINT_HERE" + '/face/v1.0/detect' 6 | 7 | image_url = 'https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/faces.jpg' 8 | 9 | headers = {'Ocp-Apim-Subscription-Key': subscription_key} 10 | 11 | params = { 12 | 'detectionModel': 'detection_03', 13 | 'returnFaceId': 'true' 14 | } 15 | 16 | response = requests.post(face_api_url, params=params, 17 | headers=headers, json={"url": image_url}) 18 | print(json.dumps(response.json())) 19 | -------------------------------------------------------------------------------- /python/FormRecognizer/FormRecognizerLogging.py: -------------------------------------------------------------------------------- 1 | # 2 | import sys 3 | import logging 4 | from azure.ai.formrecognizer import FormRecognizerClient 5 | from azure.core.credentials import AzureKeyCredential 6 | 7 | # Create a logger for the 'azure' SDK 8 | logger = logging.getLogger('azure') 9 | logger.setLevel(logging.DEBUG) 10 | 11 | # Configure a console output 12 | handler = logging.StreamHandler(stream=sys.stdout) 13 | logger.addHandler(handler) 14 | 15 | endpoint = "PASTE_YOUR_FORM_RECOGNIZER_ENDPOINT_HERE" 16 | credential = AzureKeyCredential("PASTE_YOUR_FORM_RECOGNIZER_SUBSCRIPTION_KEY_HERE") 17 | 18 | # This client will log detailed information about its HTTP sessions, at DEBUG level 19 | form_recognizer_client = FormRecognizerClient(endpoint, credential, logging_enable=True) 20 | # 21 | 22 | # 23 | receiptUrl = 
"https://raw.githubusercontent.com/Azure/azure-sdk-for-python/master/sdk/formrecognizer/azure-ai-formrecognizer/tests/sample_forms/receipt/contoso-receipt.png" 24 | poller = form_recognizer_client.begin_recognize_receipts_from_url(receiptUrl, logging_enable=True) 25 | # 26 | -------------------------------------------------------------------------------- /python/FormRecognizer/how-to-find-endpoint-and-key.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/python/FormRecognizer/how-to-find-endpoint-and-key.png -------------------------------------------------------------------------------- /python/FormRecognizer/how-to-guide/business-card-model-output.md: -------------------------------------------------------------------------------- 1 | --------Analyzing business card #1-------- 2 | 3 | Contact First Name: Chris has confidence: 0.989 4 | 5 | Contact Last Name: Smith has confidence: 0.99 6 | 7 | Company Name: CONTOSO has confidence: 0.4 8 | 9 | Department: Cloud & AI Department has confidence: 0.973 10 | 11 | Job Title: Senior Researcher has confidence: 0.988 12 | 13 | Email: chris.smith@contoso.com has confidence: 0.989 14 | 15 | Website: https://www.contoso.com/ has confidence: 0.989 16 | 17 | Address: AddressValue(house_number=4001, po_box=None, road=1st Ave NE, city=Redmond, state=WA, postal_code=98052, country_region=None, street_address=4001 1st Ave NE) has confidence: 0.969 18 | 19 | Mobile phone number: +1 (987) 123-4567 has confidence: 0.988 20 | 21 | Fax number: +1 (987) 312-6745 has confidence: 0.988 22 | 23 | Work phone number: +1 (987) 213-5674 has confidence: 0.985 24 | 25 | --- 26 | -------------------------------------------------------------------------------- /python/FormRecognizer/how-to-guide/id-document-output.md: -------------------------------------------------------------------------------- 1 | --------Analyzing ID document #1-------- 2 | 3 | First Name: CHRIS has confidence: 0.989 4 | 5 | Last Name: SMITH has confidence: 0.989 6 | 7 | Document Number: 034568 has confidence: 0.99 8 | 9 | Date of Birth: 1988-03-23 has confidence: 0.99 10 | 11 | Date of Expiration: 2026-03-23 has confidence: 0.99 12 | 13 | Sex: M has confidence: 0.99 14 | 15 | Address: AddressValue(house_number=None, po_box=None, road=Main Street, city=Charleston, state=WV, postal_code=456789, country_region=None, street_address=Main Street) has confidence: 0.99 16 | 17 | Country/Region: USA has confidence: 0.995 18 | 19 | Region: West Virginia has confidence: 0.99 20 | 21 | --- 22 | -------------------------------------------------------------------------------- /python/FormRecognizer/how-to-guide/receipt-model-output.md: -------------------------------------------------------------------------------- 1 | --------Analysis of receipt #1-------- 2 | 3 | Receipt type: receipt.retailMeal 4 | 5 | Merchant Name: Contoso has confidence: 0.616 6 | 7 | Transaction Date: 2019-06-10 has confidence: 0.989 8 | 9 | Receipt items: 10 | 11 | ...Item #1 12 | 13 | ......Item Description: Surface Pro 6 has confidence: 0.99 14 | 15 | ......Item Quantity: 2.0 has confidence: 0.995 16 | 17 | ......Total Item Price: 1998.0 has confidence: 0.995 18 | 19 | ...Item #2 20 | 21 | ......Item Description: Surface Pen has confidence: 0.99 22 | 23 | ......Item Quantity: 3.0 has confidence: 0.995 24 | 25 | ......Total Item Price: 299.97 has confidence: 0.995 26 | 27 | 
Subtotal: 2297.97 has confidence: 0.986 28 | 29 | Total tax: 218.31 has confidence: 0.989 30 | 31 | Total: 2516.28 has confidence: 0.988 32 | 33 | --- 34 | -------------------------------------------------------------------------------- /python/FormRecognizer/images/how-to-find-endpoint-and-key.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/python/FormRecognizer/images/how-to-find-endpoint-and-key.png -------------------------------------------------------------------------------- /python/FormRecognizer/sample-insurance-card.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/python/FormRecognizer/sample-insurance-card.png -------------------------------------------------------------------------------- /python/FormRecognizer/sample-receipt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Azure-Samples/cognitive-services-quickstart-code/877544c199acb8beda9caf0bd16d868fa8cf687f/python/FormRecognizer/sample-receipt.png -------------------------------------------------------------------------------- /python/LUIS/python-predict-with-rest/predict.py: -------------------------------------------------------------------------------- 1 | ########### Python 3.6 ############# 2 | 3 | # 4 | # This quickstart shows how to predict the intent of an utterance by using the LUIS REST APIs. 5 | # 6 | 7 | import requests 8 | 9 | try: 10 | 11 | ########## 12 | # Values to modify. 13 | 14 | # YOUR-APP-ID: The App ID GUID found on the www.luis.ai Application Settings page. 15 | appId = 'PASTE_YOUR_LUIS_APP_ID_HERE' 16 | 17 | # YOUR-PREDICTION-KEY: Your LUIS prediction key, 32 character value. 18 | prediction_key = 'PASTE_YOUR_LUIS_PREDICTION_SUBSCRIPTION_KEY_HERE' 19 | 20 | # YOUR-PREDICTION-ENDPOINT: Replace with your prediction endpoint. 21 | # For example, "https://westus.api.cognitive.microsoft.com/" 22 | prediction_endpoint = 'PASTE_YOUR_LUIS_PREDICTION_ENDPOINT_HERE' 23 | 24 | # The utterance you want to use. 25 | utterance = 'I want two large pepperoni pizzas on thin crust please' 26 | ########## 27 | 28 | # The headers to use in this REST call. 29 | headers = { 30 | } 31 | 32 | # The URL parameters to use in this REST call. 33 | params ={ 34 | 'query': utterance, 35 | 'timezoneOffset': '0', 36 | 'verbose': 'true', 37 | 'show-all-intents': 'true', 38 | 'spellCheck': 'false', 39 | 'staging': 'false', 40 | 'subscription-key': prediction_key 41 | } 42 | 43 | 44 | # Make the REST call. 45 | response = requests.get(f'{prediction_endpoint}luis/prediction/v3.0/apps/{appId}/slots/production/predict', headers=headers, params=params) 46 | 47 | # Display the results on the console. 48 | print(response.json()) 49 | 50 | 51 | except Exception as e: 52 | # Display the error string. 53 | print(f'{e}') 54 | -------------------------------------------------------------------------------- /python/LUIS/python-sdk-authoring-prediction/prediction_quickstart.py: -------------------------------------------------------------------------------- 1 | # Microsoft Azure Language Understanding (LUIS) - Build App 2 | # 3 | # This script queries a public LUIS app for IoT using the Python 4 | # LUIS SDK. 
5 | # 6 | # This script requires the Cognitive Services LUIS Python module: 7 | # python -m pip install azure-cognitiveservices-language-luis 8 | # 9 | # This script runs under Python 3.4 or later. 10 | 11 | # For more information about LUIS, see 12 | # 13 | # https://www.luis.ai/welcome 14 | # https://docs.microsoft.com/en-us/azure/cognitive-services/luis 15 | 16 | # 17 | from azure.cognitiveservices.language.luis.runtime import LUISRuntimeClient 18 | from msrest.authentication import CognitiveServicesCredentials 19 | 20 | import datetime, json, os, time 21 | # 22 | 23 | # 24 | runtime_key = 'PASTE_YOUR_LUIS_PREDICTION_SUBSCRIPTION_KEY_HERE' 25 | print("runtime_key: {}".format(runtime_key)) 26 | 27 | runtime_endpoint = 'PASTE_YOUR_LUIS_PREDICTION_ENDPOINT_HERE' 28 | print("runtime_endpoint: {}".format(runtime_endpoint)) 29 | # 30 | 31 | # 32 | # Use public app ID or replace with your own trained and published app's ID 33 | # to query your own app 34 | # public appID = 'df67dcdb-c37d-46af-88e1-8b97951ca1c2' 35 | luisAppID = 'PASTE_YOUR_LUIS_APP_ID_HERE' 36 | print("luisAppID: {}".format(luisAppID)) 37 | 38 | # `production` or `staging` 39 | luisSlotName = 'production' 40 | print("luisSlotName: {}".format(luisSlotName)) 41 | # 42 | 43 | 44 | # 45 | # Instantiate a LUIS runtime client 46 | clientRuntime = LUISRuntimeClient(runtime_endpoint, CognitiveServicesCredentials(runtime_key)) 47 | # 48 | 49 | 50 | # 51 | def predict(app_id, slot_name): 52 | 53 | request = { "query" : "turn on all lights" } 54 | 55 | # Note be sure to specify, using the slot_name parameter, whether your application is in staging or production. 56 | response = clientRuntime.prediction.get_slot_prediction(app_id=app_id, slot_name=slot_name, prediction_request=request) 57 | 58 | print("Top intent: {}".format(response.prediction.top_intent)) 59 | print("Sentiment: {}".format (response.prediction.sentiment)) 60 | print("Intents: ") 61 | 62 | for intent in response.prediction.intents: 63 | print("\t{}".format (json.dumps (intent))) 64 | print("Entities: {}".format (response.prediction.entities)) 65 | # 66 | 67 | predict(luisAppID, luisSlotName) 68 | -------------------------------------------------------------------------------- /python/LUIS/python-sdk-authoring-prediction/readme.md: -------------------------------------------------------------------------------- 1 | # How to run this sample 2 | 3 | The Python v3.x files create and query a Language Understanding (LUIS) app, printing SDK method results to the console when appropriate. 4 | 5 | ## Create LUIS app 6 | 7 | 1. Add your environment variables for LUIS: 8 | 9 | * LUIS_AUTHORING_KEY 10 | * LUIS_AUTHORING_ENDPOINT 11 | 12 | 1. Run `python application_quickstart.py` to run the file. 13 | 14 | The output sent to the console is: 15 | 16 | ```console 17 | Creating application... 18 | Created LUIS app Contoso 2020-01-21 22:43:10.741412 19 | with ID 916a0c7c-27eb-4637-be14-7c3264c65b52 20 | 21 | Adding entities to application... 22 | destinationEntityId 2c8a1a26-591d-4861-99d2-8a718e526188 added. 23 | classEntityId fe928009-3e22-4432-be4d-09d05ee3a68d added. 24 | flightEntityId 90a5eb54-95ed-42c7-af14-45e363ed9ecd added. 25 | 26 | Adding intents to application... 27 | Intent FindFlights 92dc1db5-8966-4cd8-bfed-1b0eff430756 added. 28 | 29 | Adding utterances to application... 30 | 3 example utterance(s) added. 31 | 32 | Training application... 33 | Waiting 10 seconds for training to complete... 34 | 35 | Publishing application... 36 | Application published. 
Endpoint URL: https://westus.api.cognitive.microsoft.com/luis/v2.0/apps/916a0c7c-27eb-4637-be14-7c3264c65b52 37 | ``` 38 | 39 | ## Query LUIS app 40 | 41 | 1. Add your environment variables for LUIS: 42 | 43 | * LUIS_RUNTIME_KEY 44 | * LUIS_RUNTIME_ENDPOINT 45 | * LUIS_APP_ID - The public LUIS IoT app ID is `df67dcdb-c37d-46af-88e1-8b97951ca1c2`. 46 | * LUIS_APP_SLOT_NAME - The `LUIS_APP_SLOT_NAME` choices are `production` or `staging`. 47 | 48 | 1. Run `python prediction_quickstart.py` to run the file. 49 | 50 | The output is for the `turn on all lights` query is: 51 | 52 | ```console 53 | Top intent: HomeAutomation.TurnOn 54 | Sentiment: None 55 | Intents: 56 | "HomeAutomation.TurnOn" 57 | Entities: {'HomeAutomation.Operation': ['on']} 58 | ``` 59 | -------------------------------------------------------------------------------- /python/LUIS/readme.md: -------------------------------------------------------------------------------- 1 | # How to run this sample 2 | 3 | The Python v3.x files create and query a Language Understanding (LUIS) app, printing SDK method results to the console when appropriate. 4 | 5 | ## Create LUIS app 6 | 7 | 1. Add your variables for LUIS as strings. 8 | 9 | 1. Run `python application_quickstart.py` to run the file. 10 | 11 | The output sent to the console is: 12 | 13 | ```console 14 | Creating application... 15 | Created LUIS app Contoso 2020-01-21 22:43:10.741412 16 | with ID 916a0c7c-27eb-4637-be14-7c3264c65b52 17 | 18 | Adding entities to application... 19 | destinationEntityId 2c8a1a26-591d-4861-99d2-8a718e526188 added. 20 | classEntityId fe928009-3e22-4432-be4d-09d05ee3a68d added. 21 | flightEntityId 90a5eb54-95ed-42c7-af14-45e363ed9ecd added. 22 | 23 | Adding intents to application... 24 | Intent FindFlights 92dc1db5-8966-4cd8-bfed-1b0eff430756 added. 25 | 26 | Adding utterances to application... 27 | 3 example utterance(s) added. 28 | 29 | Training application... 30 | Waiting 10 seconds for training to complete... 31 | 32 | Publishing application... 33 | Application published. Endpoint URL: https://westus.api.cognitive.microsoft.com/luis/v2.0/apps/916a0c7c-27eb-4637-be14-7c3264c65b52 34 | ``` 35 | 36 | ## Query LUIS app 37 | 38 | 1. Add your variables for LUIS as strings. 39 | 40 | * LUIS_RUNTIME_KEY 41 | * LUIS_RUNTIME_ENDPOINT 42 | * LUIS_APP_ID - The public LUIS IoT app ID is `df67dcdb-c37d-46af-88e1-8b97951ca1c2`. 43 | * LUIS_APP_SLOT_NAME - The `LUIS_APP_SLOT_NAME` choices are `production` or `staging`. 44 | 45 | 1. Run `python prediction_quickstart.py` to run the file. 46 | 47 | The output is for the `turn on all lights` query is: 48 | 49 | ```console 50 | Top intent: HomeAutomation.TurnOn 51 | Sentiment: None 52 | Intents: 53 | "HomeAutomation.TurnOn" 54 | Entities: {'HomeAutomation.Operation': ['on']} 55 | ``` 56 | -------------------------------------------------------------------------------- /python/LUIS/sdk-3x/.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------------------------------------- 2 | # Copyright (c) Microsoft Corporation. All rights reserved. 3 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
4 | #------------------------------------------------------------------------------------------------------------- 5 | 6 | # Update the VARIANT arg in devcontainer.json to pick a Python version: 3, 3.8, 3.7, 3.6 7 | # To fully customize the contents of this image, use the following Dockerfile instead: 8 | # https://github.com/microsoft/vscode-dev-containers/tree/v0.128.0/containers/python-3/.devcontainer/base.Dockerfile 9 | ARG VARIANT="3" 10 | FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} 11 | 12 | # [Optional] If your requirements rarely change, uncomment this section to add them to the image. 13 | # 14 | # COPY requirements.txt /tmp/pip-tmp/ 15 | # RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \ 16 | # && rm -rf /tmp/pip-tmp 17 | 18 | # [Optional] Uncomment this section to install additional packages. 19 | # 20 | # RUN apt-get update \ 21 | # && export DEBIAN_FRONTEND=noninteractive \ 22 | # && apt-get -y install --no-install-recommends 23 | 24 | -------------------------------------------------------------------------------- /python/LUIS/sdk-3x/.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/vscode-remote/devcontainer.json or this file's README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.128.0/containers/python-3 3 | { 4 | "name": "Python 3", 5 | "build": { 6 | "dockerfile": "Dockerfile", 7 | "context": "..", 8 | // Update 'VARIANT' to pick a Python version. Rebuild the container 9 | // if it already exists to update. Available variants: 3, 3.6, 3.7, 3.8 10 | "args": { "VARIANT": "3" } 11 | }, 12 | 13 | // Set *default* container specific settings.json values on container create. 14 | "settings": { 15 | "terminal.integrated.shell.linux": "/bin/bash", 16 | "python.pythonPath": "/usr/local/bin/python", 17 | "python.linting.enabled": true, 18 | "python.linting.pylintEnabled": true, 19 | "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", 20 | "python.formatting.blackPath": "/usr/local/py-utils/bin/black", 21 | "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", 22 | "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", 23 | "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", 24 | "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", 25 | "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", 26 | "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", 27 | "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" 28 | }, 29 | 30 | // Add the IDs of extensions you want installed when the container is created. 31 | "extensions": [ 32 | "ms-python.python" 33 | ] 34 | 35 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 36 | // "forwardPorts": [], 37 | 38 | // Use 'postCreateCommand' to run commands after the container is created. 39 | ,"postCreateCommand": "pip3 install azure-cognitiveservices-language-luis", 40 | 41 | // Uncomment to connect as a non-root user. See https://aka.ms/vscode-remote/containers/non-root. 
42 | // "remoteUser": "vscode" 43 | } 44 | -------------------------------------------------------------------------------- /python/Multi-Service/autosuggest_cs.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from azure.cognitiveservices.search.autosuggest import AutoSuggestClient 4 | from msrest.authentication import CognitiveServicesCredentials 5 | 6 | ''' 7 | Microsoft Azure Cognitive Services - Bing Autosuggest - Get Search Suggestions 8 | 9 | Uses the general Cognitive Services key/endpoint. It's used when you want to 10 | combine many Cognitive Services with just one authentication key/endpoint. 11 | Services are not combined here, but could be potentially. 12 | 13 | Install the Cognitive Services Bing Autosuggest SDK module: 14 | python -m pip install azure-cognitiveservices-search_autosuggest 15 | 16 | Use Python 3.4+ 17 | ''' 18 | 19 | subscription_key = "PASTE_YOUR_AUTO_SUGGEST_SUBSCRIPTION_KEY_HERE" 20 | endpoint = "PASTE_YOUR_AUTO_SUGGEST_ENDPOINT_HERE" 21 | 22 | ''' 23 | AUTHENTICATE 24 | Create an Autosuggest client. 25 | ''' 26 | credentials = CognitiveServicesCredentials(subscription_key) 27 | autosuggest_client = AutoSuggestClient(endpoint, CognitiveServicesCredentials(subscription_key)) 28 | 29 | ''' 30 | AUTOSUGGEST 31 | This example uses a query term to search for autocompletion suggestions for the term. 32 | ''' 33 | # Returns from the Suggestions class 34 | result = autosuggest_client.auto_suggest('sail') 35 | 36 | # Access all suggestions 37 | suggestions = result.suggestion_groups[0] 38 | 39 | # print results 40 | for suggestion in suggestions.search_suggestions: 41 | print(suggestion.query) 42 | print(suggestion.display_text) 43 | -------------------------------------------------------------------------------- /python/Multi-Service/spell_check_cs.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from azure.cognitiveservices.language.spellcheck import SpellCheckClient 4 | from msrest.authentication import CognitiveServicesCredentials 5 | 6 | ''' 7 | This Bing Spell Check quickstart checks some misspelled words and suggests corrections. 8 | 9 | Prerequisites: 10 | - Install the following modules: 11 | pip install azure.cognitiveservices.language.spellcheck 12 | pip install msrest 13 | 14 | Python SDK: https://docs.microsoft.com/en-us/python/api/overview/azure/cognitiveservices/spellcheck?view=azure-python 15 | ''' 16 | 17 | SUBSCRIPTION_KEY = 'PASTE_YOUR_SPELL_CHECK_SUBSCRIPTION_KEY_HERE' 18 | ENDPOINT = 'PASTE_YOUR_SPELL_CHECK_ENDPOINT_HERE' 19 | 20 | # Create a client 21 | client = SpellCheckClient(ENDPOINT, CognitiveServicesCredentials(SUBSCRIPTION_KEY)) 22 | 23 | try: 24 | # Original query 25 | query = 'bill gtaes was ehre toody' 26 | print('Original query:\n', query) 27 | print() 28 | # Check the query for misspellings 29 | # mode can be 'proof' or 'spell' 30 | result = client.spell_checker(query, mode='proof') 31 | 32 | # Print the suggested corrections 33 | print('Suggested correction:') 34 | for token in result.flagged_tokens: 35 | for suggestion_object in token.suggestions: 36 | print(suggestion_object.suggestion) 37 | 38 | except Exception as err: 39 | print("Encountered exception. 
{}".format(err)) 40 | -------------------------------------------------------------------------------- /python/Multi-Service/text_analytics_cs.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from azure.ai.textanalytics import TextAnalyticsClient 4 | from azure.core.credentials import AzureKeyCredential 5 | 6 | ''' 7 | Microsoft Azure Cognitive Services Text Analytics - Get sentiment 8 | 9 | Install the Text Analytics SDK from a command prompt or IDE terminal: 10 | python -m pip install --upgrade azure.ai.textanalytics 11 | ''' 12 | 13 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 14 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 15 | 16 | ''' 17 | AUTHENTICATE 18 | Create a Text Analytics client. 19 | ''' 20 | credential = AzureKeyCredential (subscription_key) 21 | text_analytics_client = TextAnalyticsClient (endpoint=endpoint, credential=credential) 22 | 23 | ''' 24 | TEXT ANALYTICS 25 | Gets the sentiment value of a body of text. 26 | Values closer to zero (0.0) indicate a negative sentiment, while values closer to one (1.0) indicate a positive sentiment. 27 | ''' 28 | try: 29 | documents = ["I had the best day of my life.", "This was a waste of my time. The speaker put me to sleep.", "No tengo dinero ni nada que dar...", "L'hotel veneziano era meraviglioso. È un bellissimo pezzo di architettura."] 30 | 31 | response = text_analytics_client.analyze_sentiment(documents=documents) 32 | 33 | for result in response : 34 | print ("Sentiment: " + result.sentiment) 35 | print ("Confidence scores:") 36 | print ("Positive: " + str (result.confidence_scores.positive)) 37 | print ("Neutral: " + str (result.confidence_scores.neutral)) 38 | print ("Negative: " + str (result.confidence_scores.negative)) 39 | print () 40 | 41 | except Exception as err: 42 | print("Encountered exception. {}".format(err)) 43 | -------------------------------------------------------------------------------- /python/Personalizer/azure-notebook/README.md: -------------------------------------------------------------------------------- 1 | # Personalizer Multislot simulation in an Azure notebook 2 | 3 | This tutorial simulates a Multislot Personalizer loop _system_ which suggests which products a customer should buy when displayed in different slots. The users and their preferences are store in a [user dataset](simulated_users.json). Information about the products is also available in a [product dataset](products.json). Information about the slots is also available in a [slot dataset](slots.json). 4 | 5 | Run the system for 25,000 requests and then create graph showing how fast and accurately the system learned. 6 | 7 | Run an offline counterfactual evaluation to select an optimized learning policy, and apply that policy. 8 | 9 | Run the system again, but for 5,000 requests and again create the graph showing the accuracy of the system. 10 | 11 | ## Prerequisites 12 | 13 | * [Azure notebooks](https://notebooks.azure.com/) account 14 | * [Personalizer resource](https://ms.portal.azure.com/#create/Microsoft.CognitiveServicesPersonalizer) 15 | 16 | ## How to use this sample 17 | 18 | All the instructions are in the notebook. Here is an abbreviated explanation. 19 | 20 | 1. Create a new Azure notebook project. 21 | 1. Upload the files in this directory to the Azure notebook project. 22 | 1. 
Open the MultislotPersonalizer.ipynb file and change the following values: 23 | 24 | * The value for `` in the `personalization_base_url` to the value for your Personalizer resource 25 | * The value for `` variable to one of the Personalizer resource keys. 26 | 27 | 1. Run each cell from top to bottom. Wait until each cell is complete before running the next cell. 28 | 29 | ## References 30 | 31 | * [Tutorial on docs.microsoft.com](https://docs.microsoft.com/azure/cognitive-services/personalizer/tutorial-use-azure-notebook-generate-loop-data) -------------------------------------------------------------------------------- /python/Personalizer/azure-notebook/example-rankrequest.json: -------------------------------------------------------------------------------- 1 | { 2 | "contextFeatures": [ 3 | ], 4 | "actions": [ 5 | ], 6 | "slots": [ 7 | ], 8 | "excludedActions": [ 9 | ], 10 | "eventId": "", 11 | "deferActivation": false 12 | } -------------------------------------------------------------------------------- /python/Personalizer/azure-notebook/products.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": "Red-Polo-Shirt-432", 4 | "features": [ 5 | { 6 | "onSale": true, 7 | "price": 20, 8 | "category": "Clothing" 9 | } 10 | ] 11 | }, 12 | { 13 | "id": "Tennis-Racket-133", 14 | "features": [ 15 | { 16 | "onSale": false, 17 | "price": 70, 18 | "category": "Sports" 19 | } 20 | ] 21 | }, 22 | { 23 | "id": "31-Inch-Monitor-771", 24 | "features": [ 25 | { 26 | "onSale": true, 27 | "price": 200, 28 | "category": "Electronics" 29 | } 30 | ] 31 | }, 32 | { 33 | "id": "XBox-Series X-117", 34 | "features": [ 35 | { 36 | "onSale": false, 37 | "price": 499, 38 | "category": "Electronics" 39 | } 40 | ] 41 | } 42 | ] -------------------------------------------------------------------------------- /python/Personalizer/azure-notebook/slots.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": "BigHeroPosition", 4 | "features": [ 5 | { 6 | "size": "large", 7 | "position": "left" 8 | } 9 | ], 10 | "baselineAction": "Red-Polo-Shirt-432" 11 | }, 12 | { 13 | "id": "SmallSidebar", 14 | "features": [ 15 | { 16 | "size": "small", 17 | "position": "right" 18 | } 19 | ], 20 | "baselineAction": "Tennis-Racket-133" 21 | } 22 | ] -------------------------------------------------------------------------------- /python/Personalizer/multislot-quickstart/readme.md: -------------------------------------------------------------------------------- 1 | # Python 3.x quickstart for multi-slot Personalizer 2 | 3 | Multi-slot personalization (Preview) allows you to target content in web layouts, carousels, and lists where more than one action (such as a product or piece of content) is shown to your users. With Personalizer multi-slot APIs, you can have the AI models in Personalizer learn what user contexts and products drive certain behaviors, considering and learning from the placement in your user interface. For example, Personalizer may learn that certain products or content drive more clicks as a sidebar or a footer than as a main highlight on a page. 4 | 5 | This sample asks for the time of day and device type to determine which items to display on a retail app/website. You can select if that top choice is what you would pick. 6 | 7 | ## Upgrade Persoanlizer instance to multi-slot 8 | 9 | 1. 
Configure your Personalizer instance for multi-slot (see [Setting up](https://docs.microsoft.com/en-us/azure/cognitive-services/personalizer/how-to-multi-slot?pivots=programming-language-python)) 10 | 11 | ## Run samples 12 | 13 | - You can find your key and endpoint in the resource's key and endpoint page, under resource management (Keys and Endpoint). 14 | 15 | 1. Update PERSONALIZATION_BASE_URL value ("") in sample.py with the endpoint specific to your Personalizer service instance. 16 | 17 | 1. Update RESOURCE_KEY value ("") in sample.py with the key specific to your Personalizer service instance. 18 | 19 | 1. Run app with command: 20 | 21 | ``` 22 | python sample.py 23 | ``` -------------------------------------------------------------------------------- /python/Personalizer/readme.md: -------------------------------------------------------------------------------- 1 | # Python 3.x quickstart for Personalizer 2 | 3 | This sample asks for the time of day and your personalized food taste preference then returns the top personalized food choice. You can select if that top choice is what you would pick. 4 | 5 | ## Run samples 6 | 7 | 1. Install package with pypi: 8 | 9 | ``` 10 | pip install azure-cognitiveservices-personalizer 11 | ``` 12 | 13 | 1. Add variables for Personalizer key and endpoint named `personalizer_key` and `personalizer_endpoint`. 14 | 15 | 1. Run app with command: 16 | 17 | ``` 18 | python sample.py 19 | ``` -------------------------------------------------------------------------------- /python/QnAMaker/rest/publish-kb.py: -------------------------------------------------------------------------------- 1 | # 2 | import http.client, os, sys 3 | from urllib.parse import urlparse 4 | # 5 | 6 | # Set the `subscription_key` and `authoring_endpoint` variables to your 7 | # QnA Maker authoring subscription key and endpoint. 8 | # 9 | # These values can be found in the Azure portal (ms.portal.azure.com/). 10 | # Look up your QnA Maker resource. Then, in the "Resource management" 11 | # section, find the "Keys and Endpoint" page. 12 | # 13 | # The value of `authoring_endpoint` has the format https://YOUR-RESOURCE-NAME.cognitiveservices.azure.com. 14 | # 15 | # Set the `kb_id` variable to the ID of a knowledge base you have 16 | # previously created. 17 | 18 | # 19 | subscription_key = 'PASTE_YOUR_QNA_MAKER_AUTHORING_SUBSCRIPTION_KEY_HERE' 20 | 21 | # Note http.client.HTTPSConnection wants only the host name, not the protocol (that is, 'https://') 22 | authoring_endpoint = urlparse('PASTE_YOUR_QNA_MAKER_AUTHORING_ENDPOINT_HERE').netloc 23 | 24 | kb_id = 'PASTE_YOUR_QNA_MAKER_KB_ID_HERE' 25 | 26 | publish_kb_method = '/qnamaker/v4.0/knowledgebases/' + kb_id 27 | # 28 | 29 | #
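# (Added note, not part of the original sample.) Putting the values above together,
# the call below issues an empty-bodied POST to
#   https://YOUR-RESOURCE-NAME.cognitiveservices.azure.com/qnamaker/v4.0/knowledgebases/YOUR-KB-ID
# with only the Ocp-Apim-Subscription-Key header; a 204 response means the
# knowledge base was published.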
30 | try: 31 | headers = { 32 | 'Ocp-Apim-Subscription-Key': subscription_key 33 | } 34 | 35 | conn = http.client.HTTPSConnection(authoring_endpoint,port=443) 36 | conn.request ("POST", publish_kb_method, "", headers) 37 | 38 | response = conn.getresponse () 39 | 40 | # Note status code 204 means success. 41 | print(response.status) 42 | 43 | except : 44 | print ("Unexpected error:", sys.exc_info()[0]) 45 | print ("Unexpected error:", sys.exc_info()[1]) 46 | #
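# (Added sketch, not part of the original sample.) To fail loudly on anything other
# than the expected 204, the try block above could be extended along these lines:
#
#     if response.status != 204:
#         print("Publish failed with HTTP status", response.status)
#         print(response.read().decode("utf-8"))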
47 | -------------------------------------------------------------------------------- /python/QnAMaker/rest/query-kb.py: -------------------------------------------------------------------------------- 1 | # 2 | import http.client, json, os, sys 3 | from urllib.parse import urlparse 4 | # 5 | 6 | # Set the `authoring_key` and `authoring_endpoint` variables to your 7 | # QnA Maker authoring subscription key and endpoint. 8 | # 9 | # These values can be found in the Azure portal (ms.portal.azure.com/). 10 | # Look up your QnA Maker resource. Then, in the "Resource management" 11 | # section, find the "Keys and Endpoint" page. 12 | # 13 | # The value of `authoring_endpoint` has the format https://YOUR-RESOURCE-NAME.cognitiveservices.azure.com. 14 | # 15 | # Set the `runtime_endpoint` variable to your QnA Maker runtime endpoint. 16 | # The value of `runtime_endpoint` has the format https://YOUR-RESOURCE-NAME.azurewebsites.net. 17 | # 18 | # Set the `kb_id` variable to the ID of a knowledge base you have 19 | # previously created. 20 | 21 | # 22 | subscription_key = 'PASTE_YOUR_QNA_MAKER_AUTHORING_SUBSCRIPTION_KEY_HERE' 23 | 24 | # Note http.client.HTTPSConnection wants only the host name, not the protocol (that is, 'https://') 25 | authoring_endpoint = urlparse('PASTE_YOUR_QNA_MAKER_AUTHORING_ENDPOINT_HERE').netloc 26 | 27 | runtime_endpoint = urlparse('PASTE_YOUR_QNA_MAKER_RUNTIME_ENDPOINT_HERE').netloc 28 | 29 | kb_id = 'PASTE_YOUR_QNA_MAKER_KB_ID_HERE' 30 | 31 | get_endpoint_key_method = "/qnamaker/v4.0/endpointKeys" 32 | 33 | query_kb_method = "/qnamaker/knowledgebases/" + kb_id + "/generateAnswer"; 34 | 35 | # JSON format for passing question to service 36 | question = "{'question': 'Is the QnA Maker Service free?','top': 3}"; 37 | # 38 | 39 | #
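# (Added note, not part of the original sample.) The hand-built question string above
# mirrors the original sample; building the payload with json.dumps (json is already
# imported) is a simple way to guarantee well-formed JSON, for example:
#
#     question = json.dumps({"question": "Is the QnA Maker Service free?", "top": 3})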
40 | try: 41 | authoring_conn = http.client.HTTPSConnection(authoring_endpoint,port=443) 42 | headers = { 43 | 'Ocp-Apim-Subscription-Key': subscription_key 44 | } 45 | authoring_conn.request ("GET", get_endpoint_key_method, "", headers) 46 | response = authoring_conn.getresponse () 47 | endpoint_key = json.loads(response.read())["primaryEndpointKey"] 48 | 49 | runtime_conn = http.client.HTTPSConnection(runtime_endpoint,port=443) 50 | headers = { 51 | # Note this differs from the "Ocp-Apim-Subscription-Key"/ used by most Cognitive Services. 52 | 'Authorization': 'EndpointKey ' + endpoint_key, 53 | 'Content-Type': 'application/json' 54 | } 55 | runtime_conn.request ("POST", query_kb_method, question, headers) 56 | response = runtime_conn.getresponse () 57 | answer = response.read () 58 | print(json.dumps(json.loads(answer), indent=4)) 59 | 60 | except : 61 | print ("Unexpected error:", sys.exc_info()[0]) 62 | print ("Unexpected error:", sys.exc_info()[1]) 63 | #
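# (Added sketch, not part of the original sample.) The bare "except:" above hides the
# exception type; it could be narrowed for clearer error handling, for example:
#
#     except (http.client.HTTPException, OSError) as ex:
#         print("Request failed:", ex)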
64 | -------------------------------------------------------------------------------- /rest/FormRecognizer/ignore/ignore.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /rest/test/test.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /ruby/BingSpellCheck/quickstart.rb: -------------------------------------------------------------------------------- 1 | require 'azure_cognitiveservices_spellcheck' 2 | 3 | # encoding: utf-8 4 | 5 | # This sample does the following tasks: 6 | # - Gets spell check suggestions. 7 | 8 | # To run this sample, please install the required packages by running the following commands at an Administrator command prompt. 9 | # gem install azure_cognitiveservices_spellcheck 10 | # 11 | # For more information about how to use the Azure SDK for Ruby, see: 12 | # https://azure.microsoft.com/en-us/resources/samples/cognitive-services-ruby-sdk-samples/ 13 | 14 | subscription_key = 'PASTE_YOUR_SPELL_CHECK_SUBSCRIPTION_KEY_HERE' 15 | endpoint = 'PASTE_YOUR_SPELL_CHECK_ENDPOINT_HERE' 16 | 17 | client = Azure::CognitiveServices::SpellCheck::V1_0::SpellCheckClient.new() 18 | client.credentials = MsRestAzure::CognitiveServicesCredentials.new(subscription_key) 19 | client.endpoint = endpoint 20 | 21 | result = client.spell_checker("bill gtaes") 22 | puts "Results:" 23 | result.flagged_tokens.each do |token| 24 | puts "Offset: #{token.offset}\nToken: #{token.token }\nType: #{token.type}" 25 | token.suggestions.each do |suggestion| 26 | puts "Suggestion: " + suggestion.suggestion 27 | puts "Score: #{suggestion.score}" 28 | end 29 | puts "" 30 | end 31 | -------------------------------------------------------------------------------- /ruby/ComputerVision/analyze-image/analyze-image.rb: -------------------------------------------------------------------------------- 1 | require 'net/http' 2 | 3 | uri = URI('PASTE_YOUR_COMPUTER_VISION_ENDPOINT_HERE' + 'vision/v3.1/analyze') 4 | uri.query = URI.encode_www_form({ 5 | # Request parameters 6 | 'visualFeatures' => 'Categories, Description', 7 | 'details' => 'Landmarks', 8 | 'language' => 'en' 9 | }) 10 | 11 | request = Net::HTTP::Post.new(uri.request_uri) 12 | 13 | # Request headers 14 | # Replace with your valid key. 15 | request['Ocp-Apim-Subscription-Key'] = 'PASTE_YOUR_COMPUTER_VISION_KEY_HERE' 16 | request['Content-Type'] = 'application/json' 17 | 18 | request.body = 19 | "{\"url\": \"http://upload.wikimedia.org/wikipedia/commons/3/3c/Shaki_waterfall.jpg\"}" 20 | 21 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 22 | http.request(request) 23 | end 24 | 25 | puts response.body -------------------------------------------------------------------------------- /ruby/ComputerVision/extract-printed-text/get-printed-text.rb: -------------------------------------------------------------------------------- 1 | require 'net/http' 2 | 3 | uri = URI('PASTE_YOUR_COMPUTER_VISION_ENDPOINT_HERE' + '/vision/v3.1/ocr') 4 | uri.query = URI.encode_www_form({ 5 | # Request parameters 6 | 'language' => 'unk', 7 | 'detectOrientation' => 'true' 8 | }) 9 | 10 | request = Net::HTTP::Post.new(uri.request_uri) 11 | 12 | # Request headers 13 | # Replace with your valid key. 
14 | request['Ocp-Apim-Subscription-Key'] = 'PASTE_YOUR_COMPUTER_VISION_KEY_HERE' 15 | request['Content-Type'] = 'application/json' 16 | 17 | request.body = 18 | "{\"url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/a/af/" + 19 | "Atomist_quote_from_Democritus.png/338px-Atomist_quote_from_Democritus.png\"}" 20 | 21 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 22 | http.request(request) 23 | end 24 | 25 | puts response.body -------------------------------------------------------------------------------- /ruby/ComputerVision/generate-a-thumbnail/get-thumbnail.rb: -------------------------------------------------------------------------------- 1 | require 'net/http' 2 | 3 | uri = URI('PASTE_YOUR_COMPUTER_VISION_ENDPOINT_HERE' + 'vision/v3.1/generateThumbnail') 4 | uri.query = URI.encode_www_form({ 5 | # Request parameters 6 | 'width' => '100', 7 | 'height' => '100', 8 | 'smartCropping' => 'true' 9 | }) 10 | 11 | request = Net::HTTP::Post.new(uri.request_uri) 12 | 13 | # Request headers 14 | # Replace with your valid key. 15 | request['Ocp-Apim-Subscription-Key'] = 'PASTE_YOUR_COMPUTER_VISION_KEY_HERE' 16 | request['Content-Type'] = 'application/json' 17 | 18 | request.body = 19 | "{\"url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/5/56/" + 20 | "Shorkie_Poo_Puppy.jpg/1280px-Shorkie_Poo_Puppy.jpg\"}" 21 | 22 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 23 | http.request(request) 24 | end 25 | 26 | #puts response.body -------------------------------------------------------------------------------- /ruby/TextAnalytics/REST/AnalyzeSentiment.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | 3 | require 'net/https' 4 | require 'uri' 5 | require 'json' 6 | 7 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 8 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 9 | 10 | path = '/text/analytics/v3.0/sentiment' 11 | 12 | uri = URI(endpoint + path) 13 | 14 | documents = { 'documents': [ 15 | { 'id' => '1', 'language' => 'en', 'text' => 'I really enjoy the new XBox One S. It has a clean look, it has 4K/HDR resolution and it is affordable.' }, 16 | { 'id' => '2', 'language' => 'es', 'text' => 'Este ha sido un dia terrible, llegué tarde al trabajo debido a un accidente automobilistico.' } 17 | ]} 18 | 19 | puts 'Please wait a moment for the results to appear.' 20 | 21 | request = Net::HTTP::Post.new(uri) 22 | request['Content-Type'] = "application/json" 23 | request['Ocp-Apim-Subscription-Key'] = subscription_key 24 | request.body = documents.to_json 25 | 26 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 27 | http.request (request) 28 | end 29 | 30 | puts JSON::pretty_generate (JSON (response.body)) -------------------------------------------------------------------------------- /ruby/TextAnalytics/REST/ExtractKeyPhrases.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | 3 | require 'net/https' 4 | require 'uri' 5 | require 'json' 6 | 7 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 8 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 9 | 10 | path = '/text/analytics/v3.0/keyPhrases' 11 | 12 | uri = URI(endpoint + path) 13 | 14 | documents = { 'documents': [ 15 | { 'id' => '1', 'language' => 'en', 'text' => 'I really enjoy the new XBox One S. 
It has a clean look, it has 4K/HDR resolution and it is affordable.' }, 16 | { 'id' => '2', 'language' => 'es', 'text' => 'Si usted quiere comunicarse con Carlos, usted debe de llamarlo a su telefono movil. Carlos es muy responsable, pero necesita recibir una notificacion si hay algun problema.' }, 17 | { 'id' => '3', 'language' => 'en', 'text' => 'The Grand Hotel is a new hotel in the center of Seattle. It earned 5 stars in my review, and has the classiest decor I\'ve ever seen.' }, 18 | ]} 19 | 20 | puts 'Please wait a moment for the results to appear.' 21 | 22 | request = Net::HTTP::Post.new(uri) 23 | request['Content-Type'] = "application/json" 24 | request['Ocp-Apim-Subscription-Key'] = subscription_key 25 | request.body = documents.to_json 26 | 27 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 28 | http.request (request) 29 | end 30 | 31 | puts JSON::pretty_generate (JSON (response.body)) -------------------------------------------------------------------------------- /ruby/TextAnalytics/REST/RecognizeEntities.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | 3 | require 'net/https' 4 | require 'uri' 5 | require 'json' 6 | 7 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 8 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 9 | 10 | path = '/text/analytics/v3.0/entities/recognition/general' 11 | 12 | uri = URI(endpoint + path) 13 | 14 | documents = { 'documents': [ 15 | { 'id' => '1', 'language' => 'en', 'text' => 'Microsoft is an It company.' } 16 | ]} 17 | 18 | puts 'Please wait a moment for the results to appear.' 19 | 20 | request = Net::HTTP::Post.new(uri) 21 | request['Content-Type'] = "application/json" 22 | request['Ocp-Apim-Subscription-Key'] = subscription_key 23 | request.body = documents.to_json 24 | 25 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 26 | http.request (request) 27 | end 28 | 29 | puts JSON::pretty_generate (JSON (response.body)) -------------------------------------------------------------------------------- /ruby/TextAnalytics/REST/detectLanguages.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | 3 | require 'net/https' 4 | require 'uri' 5 | require 'json' 6 | 7 | subscription_key = "PASTE_YOUR_TEXT_ANALYTICS_SUBSCRIPTION_KEY_HERE" 8 | endpoint = "PASTE_YOUR_TEXT_ANALYTICS_ENDPOINT_HERE" 9 | 10 | path = '/text/analytics/v3.0/languages' 11 | 12 | uri = URI(endpoint + path) 13 | 14 | documents = { 'documents': [ 15 | { 'id' => '1', 'text' => 'This is a document written in English.' }, 16 | { 'id' => '2', 'text' => 'Este es un document escrito en Español.' }, 17 | { 'id' => '3', 'text' => '这是一个用中文写的文件' } 18 | ]} 19 | 20 | puts 'Please wait a moment for the results to appear.' 
21 | 22 | request = Net::HTTP::Post.new(uri) 23 | request['Content-Type'] = "application/json" 24 | request['Ocp-Apim-Subscription-Key'] = subscription_key 25 | request.body = documents.to_json 26 | 27 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 28 | http.request (request) 29 | end 30 | 31 | puts JSON::pretty_generate (JSON (response.body)) -------------------------------------------------------------------------------- /ruby/face/rest/detect.rb: -------------------------------------------------------------------------------- 1 | require 'net/http' 2 | 3 | subscription_key = 'PASTE_YOUR_FACE_SUBSCRIPTION_KEY_HERE' 4 | endpoint = 'PASTE_YOUR_FACE_ENDPOINT_HERE' 5 | 6 | uri = URI(endpoint + '/face/v1.0/detect') 7 | uri.query = URI.encode_www_form({ 8 | # Request parameters 9 | 'detectionModel' => 'detection_03', 10 | 'returnFaceId' => 'true' 11 | }) 12 | 13 | request = Net::HTTP::Post.new(uri.request_uri) 14 | 15 | # Request headers 16 | request['Ocp-Apim-Subscription-Key'] = subscription_key 17 | request['Content-Type'] = 'application/json' 18 | 19 | imageUri = "https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/faces.jpg" 20 | request.body = "{\"url\": \"" + imageUri + "\"}" 21 | 22 | response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| 23 | http.request(request) 24 | end 25 | 26 | puts response.body 27 | --------------------------------------------------------------------------------