├── .azuredatastudio └── settings.json ├── Dataverse ├── Presentations │ ├── Dataverse is more than just a database.pptx │ ├── Deep Dive - Knowledge in Copilot Studio.pdf │ └── Lightning Talk - Let Knowledge power your copilots.pdf └── readme.md ├── LICENSE ├── Machine Learning Samples ├── Machine Learning Extension in Azure Data Studio │ ├── 01. Model and Database Setup.ipynb │ ├── MLExtension-MakePredictions.mp4 │ ├── README.md │ └── images │ │ └── MLExtensionMakePredictionsYouTubePreview.png └── README.md ├── Notebook Templates ├── DataAnalysisTemplate.ipynb ├── TroubleshootingGuideTemplate.ipynb ├── TroubleshootingGuideTemplateWithAnchors.ipynb └── TutorialTemplate.ipynb ├── Notebooks Presentations ├── All Day DevOps 2021 │ ├── ADDO2021_FromOopsToOpsIncidentResponseWithJupyterNotebooks_JulieKoesmarno.pdf │ └── readme.md ├── Around The Clock 2021 │ ├── BasicKQLForAzureSQLDB-JulieKoesmarno.pptx │ └── README.md ├── C# Corner 2021 │ ├── Executable TSG with Notebooks SQL People - C# Corner 2021.pptx │ └── README.md ├── Cloud Summit 2021 │ ├── FromOopsToOpsIncidentResponseWithJupyterNotebooks.pdf │ └── readme.md ├── Data Platform Summit 2021 │ └── readme.md ├── DataMinds Connect 2020 │ ├── NotebooksForTriageAndIncidentResponse.pptx │ └── README.md ├── GroupBy 2020 │ └── 20200513 - Notebooks 101 for SQL People.pptx ├── LightUp 2020 Virtual Conference │ ├── 20200714 - Notebooks 101 for SQL People.pptx │ └── README.md ├── SQL Saturday 1000 Oregon │ ├── 20201024 - Notebooks 101 for SQL People.pptx │ ├── README.md │ └── media │ │ └── Notebooks101ForSQLPeopleSQLSat1000-thumb.png ├── SQL Saturday 1015 Vienna │ ├── 20210115 - Notebooks 101 for SQL People.pptx │ ├── README.md │ ├── SQLSatVienna - Petrinja, Sisak and Glina.pptx │ └── media │ │ └── Notebooks101ForSQLPeopleSQLSat1015-thumb.png ├── SQL Saturday 1019 Singapore │ ├── 20201128 - Kusto Query Language (KQL) in Azure Data Studio.pptx │ ├── README.md │ └── media │ │ └── KQLinADSForSQLSat1019.png ├── SQL Saturday 997 Salt Lake City │ └── 20200829 - Notebooks 101 for SQL People.pptx ├── Scottish Summit 2021 │ └── README.md ├── Techorama 2021 │ ├── From Oops to Ops Incident Response with Notebooks.pdf │ └── readme.md └── User Groups │ ├── DatabaseProfessionalsVirtualMeetupGroup-20210728.md │ └── Notebooks 101 for SQL People - Barbara & Julie.pdf ├── README.md ├── Simple Demo ├── DotNetInteractive Notebooks │ └── DotNetConfDemo2021.ipynb ├── KQL Notebooks │ ├── AzureMonitorLogsSample.ipynb │ ├── Demo-ExecuteParameterizedNotebookKqlmagicLogAnalyticsWithClientSecret.ipynb │ ├── Demo-KqlmagicLogAnalyticsWithClientSecret.ipynb │ ├── Demo-KqlmagicLogAnalyticsWithUserAuth.ipynb │ ├── Demo-LAConnection.txt │ ├── Demo-NativeKusto-CrossClusterQuery.ipynb │ ├── Demo-NativeKusto-CrossServiceQuery.ipynb │ ├── Demo-ParameterizedKqlmagicLogAnalytics.ipynb │ ├── KqlmagicAzureApplicationInsights.ipynb │ ├── KqlmagicParameterizedQuery.ipynb │ ├── TroubleshootAzCLILoginIssue.ipynb │ └── testKqlmagicHelpAzCLIParam.ipynb ├── Papermill │ ├── KqlmagicParameterizedQuery.ipynb │ ├── StormEventsFlorida.ipynb │ ├── StormEventsWashington.ipynb │ └── readme.md ├── Parameterization │ ├── D365BC │ │ └── D365BC-Performance-overview-TSG.ipynb │ ├── SQLDBLog │ │ ├── AzureAutomationRunbookTutorial.ipynb │ │ ├── AzureSQLLogsAndMetricsWithLogAnalytics.ipynb │ │ └── Demo-ExecuteAzureSQLLogAnalytics.ipynb │ └── readme.md ├── PlotlyTest.ipynb ├── PowerShell Notebooks │ └── SimpleAdditionInPowerShell.ipynb ├── SQL Notebooks │ ├── Blursday.ipynb │ └── DBDiagnostics.ipynb ├── SQL Server 
2019 Diagnostic Information Queries.ipynb ├── Sample Notebooks - Data Analysis │ ├── 20200513 - Notebooks 101 for SQL People.pptx │ ├── ADSNotebooksDemo.ipynb │ ├── KqlmagicDemo.ipynb │ ├── NativeKusto-AggregatesInKusto.kql │ ├── NativeKusto-MLKustoNotebook.ipynb │ ├── NativeKusto-SimpleKustoNotebook.ipynb │ ├── PythonNotebook.gif │ ├── ReproducibleResearch.ipynb │ ├── SQLMagicWithPython.ipynb │ ├── SampleSQLNotebook-ExecSP.ipynb │ ├── WWIReproducibleResearch Vol 1.ipynb │ ├── images │ │ ├── ADSHeartNotebooks.png │ │ ├── AzureDataStudioLogo.png │ │ ├── DBA.png │ │ ├── DataAnalyst.png │ │ ├── DataEngineer.png │ │ ├── DataScientist.png │ │ ├── DatabaseDeveloper.png │ │ ├── SQLNotebookExample3.gif │ │ └── markdown.png │ └── readme.md ├── SimplePythonNotebook.ipynb ├── SimpleSQLNotebook.ipynb └── Troubleshooting │ └── Deadlock │ ├── Deadlock-1.ipynb │ └── Deadlock-2.ipynb └── Useful Notebooks ├── ADSKeyboardShortcuts.ipynb ├── ADSMarkdownCheatsheet.ipynb ├── CheckKqlmagicInstallation.ipynb ├── ConvertTigerToolboxSQLToNotebook.ipynb ├── CreateJupyterBookTigerToolbox.ipynb ├── DemoConvertToNotebooks.ipynb ├── Top10Tips.ipynb └── Using_ConvertTo-SQLNoteBook.ipynb /.azuredatastudio/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "notebook.pinnedNotebooks": [ 3 | { 4 | "notebookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Sample Notebooks - Data Analysis/ADSNotebooksDemo.ipynb", 5 | "bookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Sample Notebooks - Data Analysis" 6 | }, 7 | { 8 | "notebookPath": "c:/Temp/jubilant-data-wizards/Notebook Templates/TroubleshootingGuideTemplateWithAnchors.ipynb", 9 | "bookPath": "c:/Temp/jubilant-data-wizards/Notebook Templates", 10 | "title": "TroubleshootingGuideTemplateWithAnchors" 11 | }, 12 | { 13 | "notebookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb", 14 | "bookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog", 15 | "title": "AzureSQLLogsAndMetricsWithLogAnalytics" 16 | }, 17 | { 18 | "notebookPath": "c:/Temp/jubilant-data-wizards/Useful Notebooks/DemoConvertToNotebooks.ipynb", 19 | "bookPath": "c:/Temp/jubilant-data-wizards/Useful Notebooks", 20 | "title": "DemoConvertToNotebooks" 21 | }, 22 | { 23 | "notebookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog/AzureMonitorLogsSample.ipynb", 24 | "bookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog", 25 | "title": "AzureMonitorLogsSample" 26 | }, 27 | { 28 | "notebookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog/Demo-ExecuteAzureSQLLogAnalytics.ipynb", 29 | "bookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog", 30 | "title": "Demo-ExecuteAzureSQLLogAnalytics" 31 | }, 32 | { 33 | "notebookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog/AzureAutomationRunbookTutorial.ipynb", 34 | "bookPath": "c:/Temp/jubilant-data-wizards/Simple Demo/Parameterization/SQLDBLog", 35 | "title": "AzureAutomationRunbookTutorial" 36 | } 37 | ] 38 | } -------------------------------------------------------------------------------- /Dataverse/Presentations/Dataverse is more than just a database.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Dataverse/Presentations/Dataverse is more than 
just a database.pptx -------------------------------------------------------------------------------- /Dataverse/Presentations/Deep Dive - Knowledge in Copilot Studio.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Dataverse/Presentations/Deep Dive - Knowledge in Copilot Studio.pdf -------------------------------------------------------------------------------- /Dataverse/Presentations/Lightning Talk - Let Knowledge power your copilots.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Dataverse/Presentations/Lightning Talk - Let Knowledge power your copilots.pdf -------------------------------------------------------------------------------- /Dataverse/readme.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Dataverse/readme.md -------------------------------------------------------------------------------- /Machine Learning Samples/Machine Learning Extension in Azure Data Studio/01. Model and Database Setup.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "python3", 5 | "display_name": "Python 3" 6 | }, 7 | "language_info": { 8 | "name": "python", 9 | "version": "3.7.5", 10 | "mimetype": "text/x-python", 11 | "codemirror_mode": { 12 | "name": "ipython", 13 | "version": 3 14 | }, 15 | "pygments_lexer": "ipython3", 16 | "nbconvert_exporter": "python", 17 | "file_extension": ".py" 18 | } 19 | }, 20 | "nbformat_minor": 2, 21 | "nbformat": 4, 22 | "cells": [ 23 | { 24 | "cell_type": "markdown", 25 | "source": [ 26 | "Work in progress .... 
" 27 | ], 28 | "metadata": { 29 | "azdata_cell_guid": "f7f1e363-7c1a-4851-8267-84ae6aa9c6db" 30 | } 31 | }, 32 | { 33 | "cell_type": "code", 34 | "source": [ 35 | "pip install numpy --no-cache-dir --upgrade" 36 | ], 37 | "metadata": { 38 | "azdata_cell_guid": "a4539d8a-e833-4e58-99a6-87e249f60702" 39 | }, 40 | "outputs": [], 41 | "execution_count": null 42 | }, 43 | { 44 | "cell_type": "code", 45 | "source": [ 46 | "pip install onnxmltools --no-cache-dir --upgrade" 47 | ], 48 | "metadata": { 49 | "azdata_cell_guid": "02b6e9eb-4611-4475-b3b1-496a9b9b080c" 50 | }, 51 | "outputs": [], 52 | "execution_count": null 53 | }, 54 | { 55 | "cell_type": "markdown", 56 | "source": [ 57 | "" 58 | ], 59 | "metadata": { 60 | "azdata_cell_guid": "45c888af-f5e2-487d-b2b0-09b0d76208ca" 61 | } 62 | }, 63 | { 64 | "cell_type": "code", 65 | "source": [ 66 | "pip install onnxruntime --no-cache-dir --upgrade" 67 | ], 68 | "metadata": { 69 | "azdata_cell_guid": "9a19fbfd-bf15-4d3f-a076-0523d2576a45" 70 | }, 71 | "outputs": [], 72 | "execution_count": null 73 | }, 74 | { 75 | "cell_type": "code", 76 | "source": [ 77 | "pip install pandas --no-cache-dir --upgrade" 78 | ], 79 | "metadata": { 80 | "azdata_cell_guid": "5f0e9e35-8c50-4f4d-b0db-2b7bce4e104b" 81 | }, 82 | "outputs": [], 83 | "execution_count": null 84 | }, 85 | { 86 | "cell_type": "code", 87 | "source": [ 88 | "pip install skl2onnx --no-cache-dir --upgrade" 89 | ], 90 | "metadata": { 91 | "azdata_cell_guid": "84f441d4-1e50-41d2-b01e-84a56c10bf25" 92 | }, 93 | "outputs": [], 94 | "execution_count": null 95 | }, 96 | { 97 | "cell_type": "code", 98 | "source": [ 99 | "pip install sklearn --no-cache-dir --upgrade" 100 | ], 101 | "metadata": { 102 | "azdata_cell_guid": "f817dc01-ca58-4faa-9951-64c6a2fc71dc" 103 | }, 104 | "outputs": [], 105 | "execution_count": null 106 | }, 107 | { 108 | "cell_type": "code", 109 | "source": [ 110 | "import numpy as np\r\n", 111 | "import onnxmltools\r\n", 112 | "import onnxruntime as rt\r\n", 113 | "import pandas as pd\r\n", 114 | "import skl2onnx\r\n", 115 | "import sklearn\r\n", 116 | "import sklearn.datasets\r\n", 117 | "\r\n", 118 | "from sklearn.datasets import load_boston\r\n", 119 | "boston = load_boston()\r\n", 120 | "boston\r\n", 121 | "\r\n", 122 | "df = pd.DataFrame(data=np.c_[boston['data'], boston['target']], columns=boston['feature_names'].tolist() + ['MEDV'])\r\n", 123 | " \r\n", 124 | "target_column = 'MEDV'\r\n", 125 | " \r\n", 126 | "# Split the data frame into features and target\r\n", 127 | "x_train = pd.DataFrame(df.drop([target_column], axis = 1))\r\n", 128 | "y_train = pd.DataFrame(df.iloc[:,df.columns.tolist().index(target_column)])\r\n", 129 | "\r\n", 130 | "print(\"\\n*** Training dataset x\\n\")\r\n", 131 | "print(x_train.head())\r\n", 132 | "\r\n", 133 | "print(\"\\n*** Training dataset y\\n\")\r\n", 134 | "print(y_train.head())" 135 | ], 136 | "metadata": { 137 | "azdata_cell_guid": "32662c77-3cb3-488d-8a72-f4815c4e370f" 138 | }, 139 | "outputs": [], 140 | "execution_count": null 141 | }, 142 | { 143 | "cell_type": "code", 144 | "source": [ 145 | "" 146 | ], 147 | "metadata": { 148 | "azdata_cell_guid": "b2235e4e-8bc9-49d5-ba09-71c506e4f2c0" 149 | }, 150 | "outputs": [], 151 | "execution_count": null 152 | } 153 | ] 154 | } -------------------------------------------------------------------------------- /Machine Learning Samples/Machine Learning Extension in Azure Data Studio/MLExtension-MakePredictions.mp4: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Machine Learning Samples/Machine Learning Extension in Azure Data Studio/MLExtension-MakePredictions.mp4 -------------------------------------------------------------------------------- /Machine Learning Samples/Machine Learning Extension in Azure Data Studio/README.md: -------------------------------------------------------------------------------- 1 | # Machine Learning Extension in Azure Data Studio 2 | 3 | In this example, we will use Boston dataset and ONNX model. 4 | 5 | Microsoft docs: http://aka.ms/AzureDataStudioML 6 | 7 | Video: 8 | 1. [Make Predictions using Machine Learning Extension (Preview) in Azure Data Studio](./MLExtension-MakePredictions.mp4))\ 9 | [![Watch the video on YouTube](./images/MLExtensionMakePredictionsYouTubePreview.png)](https://youtu.be/DUgm-MlPqGc) 10 | -------------------------------------------------------------------------------- /Machine Learning Samples/Machine Learning Extension in Azure Data Studio/images/MLExtensionMakePredictionsYouTubePreview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Machine Learning Samples/Machine Learning Extension in Azure Data Studio/images/MLExtensionMakePredictionsYouTubePreview.png -------------------------------------------------------------------------------- /Machine Learning Samples/README.md: -------------------------------------------------------------------------------- 1 | # Machine Learning Sample files 2 | 3 | As Azure Data Studio is continually supporting Machine Learning workflow, I'll post useful sample files here. 4 | 5 | ## Latest updates (in descending order) 6 | 1. 
[Machine Learning Extension (Preview) in Azure Data Studio](https://docs.microsoft.com/en-us/sql/azure-data-studio/machine-learning-extension?view=sql-server-ver15#:~:text=%20Machine%20Learning%20extension%20%28preview%29%20for%20Azure%20Data,the%20settings%20for%20the%20Machine%20Learning...%20More%20) May 2020\ 7 | Sample files: 8 | -------------------------------------------------------------------------------- /Notebook Templates/DataAnalysisTemplate.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Analysis Title\r\n", 20 | "_Last updated:_ \\\r\n", 21 | "_Author(s):_ \r\n", 22 | "\r\n", 23 | "> **Tip**: Great analysis document should contain sufficient information to enable others to:\r\n", 24 | "* assess observations,\r\n", 25 | "* repeat the analysis / experiments, and\r\n", 26 | "* evaluate the methods (and suggests improvements).\r\n", 27 | "\r\n", 28 | "## Abstract / Executive Summary\r\n", 29 | "* Describe briefly about the purpose of this paper, for example: recommendation for next steps, framework, null hypothesis statement, etc.\r\n", 30 | "\r\n", 31 | "## Introduction\r\n", 32 | "* Describe the problem statement.\r\n", 33 | "* Describe briefly the approach / steps, including how readers can reach to conclusion.\r\n", 34 | "* Describe potential alternatives or other areas that are out of scope of this paper.\r\n", 35 | "\r\n", 36 | "### Assumptions\r\n", 37 | "* Describe the assumptions for this paper.\r\n", 38 | "\r\n", 39 | "## Datasets\r\n", 40 | "* Describe the datasets used.\r\n", 41 | "* Provide code on how to get the exact same datasets.\r\n", 42 | "* Provide information on what the columns in the datasets that will be used; this can be a link to a data catalog. \r\n", 43 | "\r\n", 44 | "\r\n", 45 | "" 46 | ], 47 | "metadata": { 48 | "azdata_cell_guid": "1e5ca23a-5122-4c35-8e06-4c6407baff15" 49 | } 50 | }, 51 | { 52 | "cell_type": "code", 53 | "source": [ 54 | "-- Insert the code to retrieve the dataset" 55 | ], 56 | "metadata": { 57 | "azdata_cell_guid": "881adad7-e782-4e9c-adaf-09200259d068" 58 | }, 59 | "outputs": [], 60 | "execution_count": null 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "source": [ 65 | "## Step (n) - (Description)\r\n", 66 | "* Describe the first step.\r\n", 67 | "* Provide the code to get the results. " 68 | ], 69 | "metadata": { 70 | "azdata_cell_guid": "ccf118ee-4800-4d83-8089-92d98c7ec985" 71 | } 72 | }, 73 | { 74 | "cell_type": "code", 75 | "source": [ 76 | "-- Code to repro the approach." 77 | ], 78 | "metadata": { 79 | "azdata_cell_guid": "f8f945f4-c7e9-461f-95b4-4c5aaa275d34" 80 | }, 81 | "outputs": [], 82 | "execution_count": null 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "source": [ 87 | "## Conclusion / Recommendation\r\n", 88 | "* To close the analysis, provide conclusion or recommendation." 89 | ], 90 | "metadata": { 91 | "azdata_cell_guid": "72df21bf-d182-4683-9bcb-51581be76e96" 92 | } 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "source": [ 97 | "# Appendix\r\n", 98 | "* Provide additional details." 
99 | ], 100 | "metadata": { 101 | "azdata_cell_guid": "89d9daf9-1a58-4f46-aa9d-9081ee97fa7a" 102 | } 103 | } 104 | ] 105 | } -------------------------------------------------------------------------------- /Notebook Templates/TroubleshootingGuideTemplate.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Troubleshooting Guide Title\n", 20 | "_Last updated:_ \\\n", 21 | "_Author(s):_ \n", 22 | "\n", 23 | "Great troubleshooting guide must provide sufficient details for DBA / Data Engineers / Software Engineers / *technicians* to:\n", 24 | "* observe and identify symptoms,\n", 25 | "* mitigate, \n", 26 | "* understand potential impact to users / customers,\n", 27 | "* verify that the issue has been mitigated, and \n", 28 | "* communicate back to the stakeholders.\n", 29 | "\n", 30 | "> **Tip**: On the top of this TSG, a metadata block has been placed for tracking purposes as part of the Azure Global Compliant Automated Touches effort. For more details on how to fill the Metadata block, see [Metadata Block for TSGs](MetadataBlockForTSGs.ipynb).\n", 31 | "\n", 32 | "\n", 33 | "Jump to \n", 34 | "* [Overview](#overview)\n", 35 | "* [Symptoms](#symptoms)\n", 36 | " * [How to identify symptoms](#how-to-identify-symptoms)\n", 37 | "* [Mitigation](#mitigation)\n", 38 | " * [Risk / Impact to Customers / Users (if any)](#risk--impact-to-customers--users-if-any)\n", 39 | " * [1. Mitigation Step 1](#1-mitigation-step-1)\n", 40 | " * [2. Mitigation Step (n)](#2-mitigation-step-n)\n", 41 | "* [Communication / Notification](#communication--notification)\n", 42 | "* [Appendix](#appendix)\n", 43 | " * [Reference](#some-ref) (this is an example of using \"a id\" HTML element to do a deep link)\n", 44 | "\n", 45 | "> **Tip**: The above is an example on how to do deep linking / anchoring to certain parts of notebooks.\n", 46 | "\n", 47 | "\n", 48 | "\n", 49 | "" 50 | ], 51 | "metadata": { 52 | "azdata_cell_guid": "1e5ca23a-5122-4c35-8e06-4c6407baff15" 53 | } 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "source": [ 58 | "## Oveview\n", 59 | "* Describe background / reference materials on the module(s) / component(s) and errors that occured related to this Troubleshooting Guide.\n", 60 | "* Describe component owners / teams for this area, especially if Root Cause Analysis and Prevention measures are needed as a follow up for this symptom. \n", 61 | "\n", 62 | "## Symptoms\n", 63 | "* Describe the symptoms that users / customers are experiencing. \n", 64 | "\n", 65 | "### How to identify symptoms\n", 66 | "* Describe how to verify the symptoms from event log, traces, etc. 
\n", 67 | "\n" 68 | ], 69 | "metadata": { 70 | "azdata_cell_guid": "2907e1b9-89e4-4152-8a07-fe2551a0e104" 71 | } 72 | }, 73 | { 74 | "cell_type": "code", 75 | "source": [ 76 | "-- Insert the code to identify symptoms" 77 | ], 78 | "metadata": { 79 | "azdata_cell_guid": "881adad7-e782-4e9c-adaf-09200259d068" 80 | }, 81 | "outputs": [], 82 | "execution_count": null 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "source": [ 87 | "\n", 88 | "## Mitigation\n", 89 | "* Overview of the mitigation needed.\n", 90 | "\n", 91 | "### Risk / Impact to Customers / Users (if any)\n", 92 | "* Describe the risk / impact to customers if / when the mitigation is performed\n", 93 | "\n", 94 | "\n" 95 | ], 96 | "metadata": { 97 | "azdata_cell_guid": "a1fc3d38-9808-4e38-b43f-3b972865381a" 98 | } 99 | }, 100 | { 101 | "cell_type": "markdown", 102 | "source": [ 103 | "\n", 104 | "### 1. Mitigation Step 1\n", 105 | "* Describe in detail step by step on how to mitigate" 106 | ], 107 | "metadata": { 108 | "azdata_cell_guid": "9ab3008c-713a-4675-ae55-062bfb361ecd" 109 | } 110 | }, 111 | { 112 | "cell_type": "code", 113 | "source": [ 114 | "-- Code to mitigate step (n)" 115 | ], 116 | "metadata": { 117 | "azdata_cell_guid": "f8f945f4-c7e9-461f-95b4-4c5aaa275d34" 118 | }, 119 | "outputs": [], 120 | "execution_count": null 121 | }, 122 | { 123 | "cell_type": "markdown", 124 | "source": [ 125 | "### 2. Mitigation Step (n)\n", 126 | "* Describe in detail step by step on how to mitigate" 127 | ], 128 | "metadata": { 129 | "azdata_cell_guid": "2e925a96-2c85-4a8f-baa9-50d5d6341b8a" 130 | } 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "source": [ 135 | "\n", 136 | "## Verification\n", 137 | "\n" 138 | ], 139 | "metadata": { 140 | "azdata_cell_guid": "b32022b3-0987-439e-b2ad-52ce2e12d595" 141 | } 142 | }, 143 | { 144 | "cell_type": "code", 145 | "source": [ 146 | "-- Code to verify that the mitigation steps are working" 147 | ], 148 | "metadata": { 149 | "azdata_cell_guid": "255e79db-84aa-4306-8953-8bf09018d436" 150 | }, 151 | "outputs": [], 152 | "execution_count": null 153 | }, 154 | { 155 | "cell_type": "markdown", 156 | "source": [ 157 | "## Communication / Notification\n", 158 | "* Describe what / how to notify stakeholders / customers / users that the mitigiation has been done. \n", 159 | "* Where applicable, describe how to notify the stakeholders for Root Cause Analysis and Prevention. " 160 | ], 161 | "metadata": { 162 | "azdata_cell_guid": "72df21bf-d182-4683-9bcb-51581be76e96" 163 | } 164 | }, 165 | { 166 | "cell_type": "markdown", 167 | "source": [ 168 | "# Appendix\n", 169 | "\n", 170 | "- Provide additional details.\n", 171 | "- This can also serve as reference areas too. This paragraph contains \"a id\" HTML element that you can link to. 
``\n", 172 | "\n", 173 | "\n", 174 | "![](../media/TemplatesmediaTroubleshootingGuideTemplate-image01.png)" 175 | ], 176 | "metadata": { 177 | "azdata_cell_guid": "89d9daf9-1a58-4f46-aa9d-9081ee97fa7a" 178 | } 179 | } 180 | ] 181 | } 182 | -------------------------------------------------------------------------------- /Notebook Templates/TroubleshootingGuideTemplateWithAnchors.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Troubleshooting Guide Title\r\n", 20 | "_Last updated:_ \\\r\n", 21 | "_Author(s):_ \r\n", 22 | "\r\n", 23 | "Great troubleshooting guide must provide sufficient details for DBA / Data Engineers / Software Engineers / *technicians* to:\r\n", 24 | "* observe and identify symptoms,\r\n", 25 | "* mitigate, \r\n", 26 | "* understand potential impact to users / customers,\r\n", 27 | "* verify that the issue has been mitigated, and \r\n", 28 | "* communicate back to the stakeholders.\r\n", 29 | "\r\n", 30 | "## Table of contents\r\n", 31 | "\r\n", 32 | "Jump to - \r\n", 33 | "* [Overview](#Overview)\r\n", 34 | "* [Symptoms](#Symptoms)\r\n", 35 | " * [How to identify symptoms](#How-to-identify-symptoms)\r\n", 36 | "* [Mitigation](#Mitigation)\r\n", 37 | " * [Risk / Impact to Customers / Users (if any)](#Risk--Impact-to-Customers--Users-if-any)\r\n", 38 | " * [1. Mitigation Step 1](#1-Mitigation-Step-1)\r\n", 39 | " * [2. Mitigation Step (n)](#2-Mitigation-Step-n)\r\n", 40 | "* [Communication / Notification](#Communication--Notification)\r\n", 41 | "* [Appendix](#Appendix)\r\n", 42 | " * [Reference](#some-ref) (this is an example of using \"a id\" HTML element to do a deep link)\r\n", 43 | "\r\n", 44 | "> **Tip**: The above is an example on how to do deep linking / anchoring to certain parts of notebooks.\r\n", 45 | "\r\n", 46 | "\r\n", 47 | "\r\n", 48 | "" 49 | ], 50 | "metadata": { 51 | "azdata_cell_guid": "d7e06713-0170-469a-abfe-6961a27b25c9" 52 | } 53 | }, 54 | { 55 | "cell_type": "markdown", 56 | "source": [ 57 | "## Overview\r\n", 58 | "* Describe background / reference materials on the module(s) / component(s) and errors that occured related to this Troubleshooting Guide.\r\n", 59 | "* Describe component owners / teams for this area, especially if Root Cause Analysis and Prevention measures are needed as a follow up for this symptom. \r\n", 60 | "\r\n", 61 | "## Symptoms\r\n", 62 | "* Describe the symptoms that users / customers are experiencing. \r\n", 63 | "\r\n", 64 | "### How to identify symptoms\r\n", 65 | "* Describe how to verify the symptoms from event log, traces, etc. 
\r\n", 66 | "\r\n", 67 | "" 68 | ], 69 | "metadata": { 70 | "azdata_cell_guid": "0658b9b4-497e-482b-a6e3-61ec15087d56" 71 | } 72 | }, 73 | { 74 | "cell_type": "code", 75 | "source": [ 76 | "-- Insert the code to identify symptoms" 77 | ], 78 | "metadata": { 79 | "azdata_cell_guid": "881adad7-e782-4e9c-adaf-09200259d068" 80 | }, 81 | "outputs": [], 82 | "execution_count": null 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "source": [ 87 | "\r\n", 88 | "## Mitigation\r\n", 89 | "* Overview of the mitigation needed.\r\n", 90 | "\r\n", 91 | "### Risk / Impact to Customers / Users (if any)\r\n", 92 | "* Describe the risk / impact to customers if / when the mitigation is performed\r\n", 93 | "\r\n", 94 | "\r\n", 95 | "" 96 | ], 97 | "metadata": { 98 | "azdata_cell_guid": "26862bc3-7ca7-4fa9-ac85-f781aa65b530" 99 | } 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "source": [ 104 | "\r\n", 105 | "### 1. Mitigation Step 1\r\n", 106 | "* Describe in detail step by step on how to mitigate" 107 | ], 108 | "metadata": { 109 | "azdata_cell_guid": "d30cfe4d-04bf-4d5b-8e40-9ca2fa2b0387" 110 | } 111 | }, 112 | { 113 | "cell_type": "code", 114 | "source": [ 115 | "-- Code to mitigate step (n)" 116 | ], 117 | "metadata": { 118 | "azdata_cell_guid": "0d10d448-9521-4555-a517-7bb2d68548ca" 119 | }, 120 | "outputs": [], 121 | "execution_count": null 122 | }, 123 | { 124 | "cell_type": "markdown", 125 | "source": [ 126 | "### 2. Mitigation Step (n)\r\n", 127 | "* Describe in detail step by step on how to mitigate" 128 | ], 129 | "metadata": { 130 | "azdata_cell_guid": "02fc8ac4-bea8-4539-90fc-8ce8c737c6f2" 131 | } 132 | }, 133 | { 134 | "cell_type": "markdown", 135 | "source": [ 136 | "\r\n", 137 | "## Verification\r\n", 138 | "\r\n", 139 | "" 140 | ], 141 | "metadata": { 142 | "azdata_cell_guid": "d778fdd1-007d-4e96-aff9-8b42ea8097f1" 143 | } 144 | }, 145 | { 146 | "cell_type": "code", 147 | "source": [ 148 | "-- Code to verify that the mitigation steps are working" 149 | ], 150 | "metadata": { 151 | "azdata_cell_guid": "255e79db-84aa-4306-8953-8bf09018d436" 152 | }, 153 | "outputs": [], 154 | "execution_count": null 155 | }, 156 | { 157 | "cell_type": "markdown", 158 | "source": [ 159 | "## Communication / Notification\r\n", 160 | "* Describe what / how to notify stakeholders / customers / users that the mitigiation has been done. \r\n", 161 | "* Where applicable, describe how to notify the stakeholders for Root Cause Analysis and Prevention. " 162 | ], 163 | "metadata": { 164 | "azdata_cell_guid": "d65cbde1-b111-4406-9ff6-32b144741c78" 165 | } 166 | }, 167 | { 168 | "cell_type": "markdown", 169 | "source": [ 170 | "# Appendix\n", 171 | "\n", 172 | "- Provide additional details.\n", 173 | "- This can also serve as reference areas too. This paragraph contains \"a id\" HTML element that you can link to. 
``\n", 174 | "\n", 175 | " " 176 | ], 177 | "metadata": { 178 | "azdata_cell_guid": "f192c0e7-ae53-4342-af18-66760477b542" 179 | } 180 | } 181 | ] 182 | } -------------------------------------------------------------------------------- /Notebook Templates/TutorialTemplate.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# How-to Tutorial Title\r\n", 20 | "_Last updated:_ \\\r\n", 21 | "_Author(s):_ \r\n", 22 | "\r\n", 23 | "> **Tip**: Great tutorial document should contain sufficient information to enable students to:\r\n", 24 | "* understand the objective(s) of the tutorial,\r\n", 25 | "* setup the environment required (pre-requisites),\r\n", 26 | "* steps to follow, and\r\n", 27 | "* verify the tutorial goal is achieved.\r\n", 28 | "\r\n", 29 | "## Overview\r\n", 30 | "* Describe the objective(s) of this tutorial.\r\n", 31 | "\r\n", 32 | "## Prerequisites\r\n", 33 | "* Describe prerequisites of running this tutorial, for example: environment setup (including version number etc)\r\n", 34 | "* Provide reference to how to obtain the prerequisites, where applicable. \r\n", 35 | "\r\n", 36 | "### How to verify the prerequisites\r\n", 37 | "* Optionally, if there is a way to verify if a prerequisites are met. \r\n", 38 | "" 39 | ], 40 | "metadata": { 41 | "azdata_cell_guid": "1e5ca23a-5122-4c35-8e06-4c6407baff15" 42 | } 43 | }, 44 | { 45 | "cell_type": "code", 46 | "source": [ 47 | "-- Insert the code to check that the pre-requisite is met." 48 | ], 49 | "metadata": { 50 | "azdata_cell_guid": "881adad7-e782-4e9c-adaf-09200259d068" 51 | }, 52 | "outputs": [], 53 | "execution_count": null 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "source": [ 58 | "## Step (n) - (Description)\r\n", 59 | "* Describe the (n)th step.\r\n", 60 | "* Provide the code to get the results. " 61 | ], 62 | "metadata": { 63 | "azdata_cell_guid": "ccf118ee-4800-4d83-8089-92d98c7ec985" 64 | } 65 | }, 66 | { 67 | "cell_type": "code", 68 | "source": [ 69 | "-- Code to repro the approach." 70 | ], 71 | "metadata": { 72 | "azdata_cell_guid": "f8f945f4-c7e9-461f-95b4-4c5aaa275d34" 73 | }, 74 | "outputs": [], 75 | "execution_count": null 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "source": [ 80 | "## Summary / Conclusion\r\n", 81 | "* Describe that the objective of the tutorial is achieved. This could be something to verify, etc. " 82 | ], 83 | "metadata": { 84 | "azdata_cell_guid": "72df21bf-d182-4683-9bcb-51581be76e96" 85 | } 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "source": [ 90 | "# Appendix\r\n", 91 | "* Provide additional details." 
92 | ], 93 | "metadata": { 94 | "azdata_cell_guid": "89d9daf9-1a58-4f46-aa9d-9081ee97fa7a" 95 | } 96 | } 97 | ] 98 | } -------------------------------------------------------------------------------- /Notebooks Presentations/All Day DevOps 2021/ADDO2021_FromOopsToOpsIncidentResponseWithJupyterNotebooks_JulieKoesmarno.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/All Day DevOps 2021/ADDO2021_FromOopsToOpsIncidentResponseWithJupyterNotebooks_JulieKoesmarno.pdf -------------------------------------------------------------------------------- /Notebooks Presentations/All Day DevOps 2021/readme.md: -------------------------------------------------------------------------------- 1 | # Welcome to **From Oops to Ops: Incident Management with Notebooks** 2 | 3 | Register / schedule: https://www.alldaydevops.com/2021-schedule 4 | Tuesday October 28, 2021 5 | 6 | Recording: **coming soon!** 7 | 8 | Speaker: 9 | - Julie Koesmarno - http://mssqlgirl.com | http://linkedin.com/in/juliekoesmarno | [@MsSQLGirl](http://twitter.com/mssqlgirl) 10 | 11 | ## Abstract 12 | What if you can apply software engineering practices to your troubleshooting guides (TSGs) / knowledge base for your team’s on-call or for your customers? What if you can reduce stress and mistakes in your incident response workflow and activate a more scientific approach? 13 | 14 | Join this session to learn more about TSG Ops framework using Jupyter Notebook for executable and automatable troubleshooting guides / knowledge base. TSG Ops innovates incident response approach by applying software engineering practice in curating TSGs and activating our scientific approach when troubleshooting. We will share our learnings and our journey in implementing TSG Ops using open source technology, internally in Azure Data and externally for our Azure Data customers. 15 | 16 | ## Useful pre-reads: 17 | 1. Azure Data Studio. 18 | - Download from http://aka.ms/getAzureDataStudio. 19 | 2. Basic knowledge on Notebooks: 20 | - [Notebooks 101 for SQL people video tutorial](https://www.youtube.com/watch?v=80L-UTOlknw) from Scottish Summit channel, or 21 | - [Jupyter Notebooks for the mere mortals video tutorial](https://www.youtube.com/watch?v=-akGNOsaMg0) from Tech(K)now Day channel. 22 | 3. PowerShellNotebook module. 23 | - [Invoke-PowerShellNotebook](https://github.com/dfinke/PowerShellNotebook). 24 | - Related video: https://www.youtube.com/watch?v=3b_LQn18oHI. 25 | 4. SqlServer module. 26 | - [Invoke-SqlNotebook](https://docs.microsoft.com/powershell/module/sqlserver/invoke-sqlnotebook) 27 | 28 | ## Topics covered: 29 | 1. Incident Response – why is it hard (to scale)? 30 | More complex world == more automation, yet incident response are often considered as a last thing to do. Most engineers don't want to spend time on documenting, they want to spend time on coding. What if engineers code the troubleshooting guides, instead of writing static documentation? 31 | 32 | 2. Re-framing our approach to troubleshooting guides: software artefacts & Jupyter Notebooks 33 | Troubleshooting Guides are software artefacts. Jupyter Notebooks can be executed through command lines and can be parameterized therefore it's can be added as part of your automation. This includes testing automation as well as incident response automation. 34 | 35 | 3. 
Implementation: Executable, reusable & automatable troubleshooting guides 36 | There are three automation categories: 37 | 1. Automate the Diagnostics / Root Cause Analysis (RCA). 38 | 2. Automate incident filing. 39 | 3. Automate mitigation steps. 40 | 41 | A flow example of the Automate RCA: 42 | - Use Power Automate or Azure Logic Apps to automate the flow, i.e. to automatically trigger when an email is received or if a new task is assigned to you, to send an email after a job is completed with diagnostic or mitigation results. 43 | - Use Azure Automation account and jobs that gets called by Power Automate or Azure Logic Apps, to run a PowerShell script that invokes a notebook (either SQL or PowerShell kernel based). 44 | - Use a Jupyter Notebook with SQL / PowerShell kernel that does a number of diagnostic queries (and mitigation if applicable). 45 | 46 | Watch the demo video previously presented at Scottish Summit - Notebooks 101 for SQL People starting at minute [27:07](https://youtu.be/80L-UTOlknw?t=1627). 47 | - Use Azure Data Studio UI to convert one SQL script at a time. 48 | Related blog post: https://www.mssqlgirl.com/inajiffy-converting-from-sql-scripts-to-notebook/ 49 | - Use PowerShellNotebook module for bulk imports, specifically these cmdlets: 50 | - `ConvertTo-PowerShellNotebook` to convert PowerShell scripts to Notebooks with PowerShell kernel. 51 | - `ConvertTo-SQLNotebook` to convert SQL scripts to Notebooks with SQL kernel. 52 | 53 | ## Examples of SQL related notebooks 54 | 1. Tiger Toolbox : 55 | - SQL Assessment Toolkit: https://github.com/microsoft/sql-server-samples/tree/master/samples/manage/sql-assessment-api/notebooks 56 | - SQL Hybrid Toolkit: https://github.com/microsoft/tigertoolbox/tree/master/SQL-Hybrid-Cloud-Toolkit/content 57 | 2. Glenn Berry’s Diagnostic Notebooks for SQL Server and Azure SQL: https://glennsqlperformance.com/resources/ 58 | 4. Rob Sewell’s Notebook: http://sqldbawithabeard.com https://github.com/SQLDBAWithABeard/JupyterNotebooks 59 | 4. The SQL Diagnostic (Jupyter) Book by Emanuele Meazzo https://tsql.tech/the-sql-diagnostic-jupyter-book/ 60 | 61 | ## Other useful resources 62 | Submit your issues on Azure Data Studio: https://github.com/microsoft/azuredatastudio/issues 63 | -------------------------------------------------------------------------------- /Notebooks Presentations/Around The Clock 2021/BasicKQLForAzureSQLDB-JulieKoesmarno.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/Around The Clock 2021/BasicKQLForAzureSQLDB-JulieKoesmarno.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/Around The Clock 2021/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to Around The Clock with Azure SQL and Azure Data Factory 2 | 3 | Demo notebook is [here](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/KQL%20Notebooks/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb). 
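> **Note**: As a rough illustration only — the query below sketches the kind of KQL the demo notebook runs against a Log Analytics workspace receiving Azure SQL diagnostics. It assumes the standard `AzureMetrics` schema and the `cpu_percent` metric; the exact queries in the notebook may differ.

```kusto
// Average CPU percentage per hour for Azure SQL databases emitting metrics
// to this Log Analytics workspace (standard AzureMetrics columns assumed).
AzureMetrics
| where ResourceProvider == "MICROSOFT.SQL"
| where MetricName == "cpu_percent"
| where TimeGenerated > ago(24h)
| summarize AvgCpu = avg(Average) by Resource, bin(TimeGenerated, 1h)
| order by TimeGenerated asc
```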
4 | 5 | 6 | Useful resources: 7 | * Download Azure Data Studio: http://aka.ms/getAzureDataStudio 8 | * Learn more about Azure Data Studio: http://aka.ms/AzureDataStudio 9 | * [SQL to KQL cheatsheet](https://docs.microsoft.com/azure/data-explorer/kusto/query/sqlcheatsheet) 10 | * [Azure SQL diagnostic configuration](https://docs.microsoft.com/azure/azure-sql/database/metrics-diagnostic-telemetry-logging-streaming-export-configure?tabs=azure-portal#metrics-and-logs-available) 11 | * [Azure SQL DB and Log Analytics Better Together (3 parts)](https://techcommunity.microsoft.com/t5/azure-database-support-blog/azure-sql-db-and-log-analytics-better-together-part-1/ba-p/794833) 12 | * [KQL query samples](https://docs.microsoft.com/azure/data-explorer/kusto/query/samples?&pivots=azuremonitor ) 13 | * [Short video on KQL capability in Azure Data Studio at Data Exposed()](https://techcommunity.microsoft.com/t5/video-hub/using-kusto-query-language-kql-in-azure-data-studio-data-exposed/m-p/1897166) 14 | -------------------------------------------------------------------------------- /Notebooks Presentations/C# Corner 2021/Executable TSG with Notebooks SQL People - C# Corner 2021.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/C# Corner 2021/Executable TSG with Notebooks SQL People - C# Corner 2021.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/C# Corner 2021/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to Executable Troubleshooting Guides for SQL People 2 | ## SQL Server Virtual Conference by C# Corner 3 | 4 | Presenter: Julie Koesmarno [@MsSQLGirl](http://twiter.com/MsSQLGirl) and Aaron Nelson [@SQLVariant](http://twitter.com/SQLVariant) 5 | Jan 29, 2021 6 | 7 | Watch the recording on [YouTube](https://youtu.be/AzSB6AxBGbQ) 8 | 9 | ## Resources used during the presentation: 10 | 1. [Notebook Templates](https://github.com/MsSQLGirl/jubilant-data-wizards/tree/main/Notebook%20Templates) 11 | 2. 
[Aaron's executable TSG notebook](https://gist.github.com/SQLvariant/b8a5f17f9019106d65498133c6d099ae) 12 | 13 | If you are working with Azure SQL (or other Azure services) and looking to learn more about KQL + notebooks, check out this 20 minute video on YouTube also, [Basic KQL for monitoring Azure SQL services](https://youtu.be/GbsmbA65cjc) 14 | 15 | ![](https://www.mssqlgirl.com/wp-content/uploads/2021/01/image.png) 16 | -------------------------------------------------------------------------------- /Notebooks Presentations/Cloud Summit 2021/FromOopsToOpsIncidentResponseWithJupyterNotebooks.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/Cloud Summit 2021/FromOopsToOpsIncidentResponseWithJupyterNotebooks.pdf -------------------------------------------------------------------------------- /Notebooks Presentations/Cloud Summit 2021/readme.md: -------------------------------------------------------------------------------- 1 | # Welcome to From Oops to Ops: Incident Response with Jupyter Notebooks 2 | 3 | Presenters: 4 | - Julie Koesmarno ([LinkedIn](https://www.linkedin.com/in/juliekoesmarno/) | twitter: [@MsSQLGirl](http://twitter.com/mssqlgirl) | http://MsSQLGirl.com | [GitHub](https://github.com/mssqlgirl)) 5 | - Shafiq Rahman ([LinkedIn](https://www.linkedin.com/in/shafiq-rahman-16853a5/) | [GitHub](https://github.com/qifahs)) 6 | 7 | Live session schedule on **Sep 16, 2021 at 9:10am Pacific Time** on https://azuresummit.live/ 8 | 9 | ## Slide Deck 10 | Slide deck is available [here](./FromOopsToOpsIncidentResponseWithJupyterNotebooks.pdf) 11 | 12 | ## Session Notes 13 | This session assumes that you have some basic knowledge of Jupyter Notebooks and paramterization in Jupyter Notebooks. 14 | 15 | Here are some videos to get you up to speed: 16 | 1. Watch Julie Koesmarno’s Jupyter Notebooks for the Mere Mortals: [youtu.be/-akGNOsaMg0](http://youtu.be/-akGNOsaMg0) 17 | 2. Watch Aaron Nelson’s Parameterization in Azure Data Studio (for PowerShell) [youtu.be/5DDeSb-mHP0](http://youtu.be/5DDeSb-mHP0) 18 | 19 | 20 | ## Session Recording 21 | [Day 4 at Cloud Summit](https://youtu.be/eVVyWNSxtco?t=10074) 22 | -------------------------------------------------------------------------------- /Notebooks Presentations/Data Platform Summit 2021/readme.md: -------------------------------------------------------------------------------- 1 | # Welcome to Notebooks Deep Dive at Data Platform Summit 2021 2 | 3 | ## Useful References 4 | 5 | 1. Azure Monitor Logs in Azure Data Studio (link coming soon) 6 | 2. 7 | 8 | 9 | ### Launch and execute SQL Docs articles as notebooks in Azure Data Studio 10 | 11 | This is a preview feature for us to collect feedback from you. The docs articles that you can launch and execute as notebooks in Azure Data Studio are: 12 | 1. MSSQLSERVER_35250 13 | https://docs.microsoft.com/sql/relational-databases/errors-events/mssqlserver-35250-database-engine-error 14 | 2. Troubleshoot a Full Transaction Log (SQL Server Error 9002) 15 | https://docs.microsoft.com/sql/relational-databases/logs/troubleshoot-a-full-transaction-log-sql-server-error-9002 16 | 17 | 18 | ### Execute Notebook from CLI 19 | 1. For PowerShell Notebooks: Use [Invoke-ExecuteNotebook](https://github.com/dfinke/PowerShellNotebook#executing-a-notebook) 20 | This requires PowerShell 7.1. 21 | 2. 
For SQL Notebooks: Use [Invoke-SqlNotebook](https://docs.microsoft.com/powershell/module/sqlserver/invoke-sqlnotebook) 22 | 3. For Python Notebooks (including Kqlmagic): Use [Papermill](https://docs.microsoft.com/sql/azure-data-studio/notebooks/parameterize-papermill) 23 | 24 | 25 | ### Notebook Parameterization 26 | When using Notebook Parameterization, you can use Python and PowerShell 7.1. You can create an "API" like to parameterize notebook execution. read more: 27 | - [Create a parameterized notebook by using Papermill](https://docs.microsoft.com/sql/azure-data-studio/notebooks/parameterize-papermill) 28 | - [Create a parameterized notebook by using the notebook URI](https://docs.microsoft.com/en-us/sql/azure-data-studio/notebooks/parameterize-uri) 29 | - [Create a paramtereized notebook by using the Run with Parametes action](https://docs.microsoft.com/en-us/sql/azure-data-studio/notebooks/run-with-parameters) -------------------------------------------------------------------------------- /Notebooks Presentations/DataMinds Connect 2020/NotebooksForTriageAndIncidentResponse.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/DataMinds Connect 2020/NotebooksForTriageAndIncidentResponse.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/DataMinds Connect 2020/README.md: -------------------------------------------------------------------------------- 1 | # Notebooks for Triage and Incident Response: A Tale of DBA Heroism​ 2 | 3 | In this session you will learn how to leverage Azure Data Studio and notebooks to monitor and diagnose SQL Server and Azure SQL instances. Cornerstone tools such as extended events, PowerShell, and dynamic management views (used by DBAs and Azure CSS alike) can be combined with the power of Azure Data Studio and T-SQL notebooks to facilitate managing your data estate. Modernize your troubleshooting / incident response guides to executable and automatable guides, for efficient root cause analysis, mitigation and auditing! 4 | 5 | 6 | Presenters: [Julie Koesmarno](http://twitter.com/MsSQLGirl), [Drew Skwiers-Koballa](http://twitter.com/SysAdminDrew) 7 | 8 | Useful links: 9 | 1. [Slide Deck](./NotebooksForTriageAndIncidentResponse.pptx) -> This also has more useful resources for the DBA scenarios :) 10 | 2. [Simple Demo Convert To Notebook](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Useful%20Notebooks/DemoConvertToNotebooks.ipynb) 11 | 3. [Sample: Convert TigerToolbox to Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Useful%20Notebooks/ConvertTigerToolboxSQLToNotebook.ipynb) 12 | 4. [DBA Notebooks](https://github.com/dzsquared/dba-notebooks) by Drew Skwiers-Koballa 13 | 5. [The Learner's Guide to SQL Server Performance Triage](https://littlekendra.com/2020/03/19/the-learners-guide-to-sql-server-performance-triage/) by Kendra Little 14 | 6. [SQL Server Diagnostic Jupyter Book](https://tsql.tech/the-sql-diagnostic-jupyter-book/) by Emanuele Meazzo 15 | 7. [SQL DBA with a Beard's Jupyter Notebooks](https://beard.media/Notebooks) by Rob Sewell 16 | 8. 
[How can I automate SQL Notebooks](https://github.com/SQLvariant/Demos/blob/master/Presentations/Intro-to-Jupyter-Notebooks/8.How-Can-I-Automate-SQL-Notebooks/How-Can-I-Automate-SQL-Notebooks.ipynb) by Aaron Nelson 17 | -------------------------------------------------------------------------------- /Notebooks Presentations/GroupBy 2020/20200513 - Notebooks 101 for SQL People.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/GroupBy 2020/20200513 - Notebooks 101 for SQL People.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/LightUp 2020 Virtual Conference/20200714 - Notebooks 101 for SQL People.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/LightUp 2020 Virtual Conference/20200714 - Notebooks 101 for SQL People.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/LightUp 2020 Virtual Conference/README.md: -------------------------------------------------------------------------------- 1 | # LightUp Virtual Conference 2 | 3 | Event website: https://www.2020twenty.net/lightup/ 4 | 5 | Slide deck: [Notebooks 101 for SQL People](./20200714%20-%20Notebooks%20101%20for%20SQL%20People.pptx) 6 | 7 | Blog post: [MsSQLGirl.com](http://mssqlgirl.com) 8 | 9 | Demo References: 10 | 1. [Data Analysis Template](../NotebookTemplates/DataAnalysisTemplate.ipynb) 11 | 2. [Troubleshooting Guide Template](../NotebookTemplates/TroubleshootingGuideTemplate.ipynb) 12 | 3. [Tutorial / How-To Template](../NotebookTemplates/TutorialTemplate.ipynb) 13 | 14 | Sample Notebook: 15 | 1. [Data Analysis with Wide World Importers dataset using Notebook with SQL Kernel](Simple%20Demo\Sample%20Notebooks%20-%20Data%20Analysis\WWIReproducibleResearch%20Vol%201.ipynb) 16 | 2. [Sample research with Python](Simple%20Demo\Sample%20Notebooks%20-%20Data%20Analysis\ReproducibleResearch.ipynb) -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1000 Oregon/20201024 - Notebooks 101 for SQL People.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 1000 Oregon/20201024 - Notebooks 101 for SQL People.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1000 Oregon/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to Notebooks 101 for SQL People 2 | ## SQL Saturday #1000 Oregon (virtual) 3 | https://www.sqlsaturday.com/1000/ 4 | 5 | Presenter: Julie Koesmarno [@MsSQLGirl](http://twiter.com/MsSQLGirl) \ 6 | Oct 24, 2020 7 | 8 | 9 | ![](./media/Notebooks101ForSQLPeopleSQLSat1000-thumb.png) 10 | ## Abstract 11 | _Are you a database developer, a DBA or a data analyst? Do you find spending quite a bit of time trying to reproduce analysis or reproduce issues and the troubleshooting techniques? 
This Notebooks 101 session is for you!_ 12 | 13 | _The lack of rigor in being able to reproduce analysis in business context or to reproduce data troubleshooting can lead to confusion and time wasted on work that had been previously done. With Notebooks, Data Professionals can share their techniques and data sources used for the data analysis or troubleshooting for code review and reproducible insights/troubleshooting._ 14 | 15 | _Azure Data Studio Notebooks support SQL and KQL natively with intellisense; with easy to use charting capabilities. With Notebooks and Jupyter Book (collection of Notebooks) in Azure Data Studio, it’s also easy now for your team to build an onboarding guide (user manual) / documentation on data sources, useful sample scripts and tutorials._ 16 | 17 | ## Resources used at SQL Saturday #1000 Oregon: 18 | 1. [Slide Deck](https://github.com/MsSQLGirl/jubilant-data-wizards/raw/main/Notebooks%20Presentations/SQL%20Saturday%201000%20Oregon/20201024%20-%20Notebooks%20101%20for%20SQL%20People.pptx) 19 | 2. [Notebook Templates](https://github.com/MsSQLGirl/jubilant-data-wizards/tree/main/Notebook%20Templates) 20 | 3. Notebook Demo: 21 | - [Data Analysis with SQL Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/WWIReproducibleResearch%20Vol%201.ipynb) 22 | - [Data Analysis with Python Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/ReproducibleResearch.ipynb) 23 | - [Convert SQL scripts / PowerShell scripts to Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Useful%20Notebooks/DemoConvertToNotebooks.ipynb) 24 | 4. Remote Jupyter Book sample that you can use to download to Azure Data Studio directly: https://github.com/MsSQLGirl/jubilant-data-wizards/releases/tag/v0.0.0 25 | 26 | ## Other useful resources: 27 | 1. [How to use Jupyter Notebooks in Azure Data Studio | Azure Friday](https://www.youtube.com/watch?v=pHuRj9ty9cI) 28 | 2. [PowerShellNotebook](https://github.com/dfinke/PowerShellNotebook) 29 | 3. [WideWorldImporters sample database to restore to your SQL Server / Azure SQL DB](https://github.com/Microsoft/sql-server-samples/releases/tag/wide-world-importers-v1.0) 30 | 31 | ## FAQ 32 | 1. Does SQL Notebooks work only for SQL Server? \ 33 | It works for SQL Server, Azure SQL DB + Managed Instance and PostgreSQL. See [PostgreSQL extension in Azure Data Studio documentation](https://docs.microsoft.com/en-us/sql/azure-data-studio/extensions/postgres-extension?view=sql-server-ver15). 34 | 35 | 2. Does the resultset support spatial data type? \ 36 | Not currently. Please vote [Spatial support feature request](https://github.com/microsoft/azuredatastudio/issues/267) 37 | 38 | 3. Is it possible to export result set of notebooks to Excel? \ 39 | In Azure Data Studio, there are a few buttons on top of the result set that you can use to export result set to another file format. 40 | 41 | 4. Using the SQL Notebook, is it possible to join to many other data sources (PostgreSQL and SQL Server)? 42 | No. Currently, this would require Python. (Unless you have [EXTERNAL DATA SOURCE](https://docs.microsoft.com/sql/t-sql/statements/create-external-data-source-transact-sql) defined in SQL Server). 43 | 44 | 5. What's the best way to share notebooks? 45 | - A great way to start is using Source Control, like git system which is supported in Azure Data Studio natively. 
46 | - Another way is to ship this as Jupyter Remote Book in GitHub. 47 | - Create an extension to package your Jupyter Book like [SQL Server Diagnostic Book as an Extension](https://github.com/EmanueleMeazzo/tsql.tech-Code-snippets/releases/tag/v1.0). 48 | -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1000 Oregon/media/Notebooks101ForSQLPeopleSQLSat1000-thumb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 1000 Oregon/media/Notebooks101ForSQLPeopleSQLSat1000-thumb.png -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1015 Vienna/20210115 - Notebooks 101 for SQL People.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 1015 Vienna/20210115 - Notebooks 101 for SQL People.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1015 Vienna/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to Notebooks 101 for SQL People 2 | ## [SQL Saturday #1015 Vienna (virtual)](https://sqlusergroupaustria.wordpress.com/2021/01/08/sqlsaturday-vienna-2021-schedule/) 3 | 4 | 5 | Presenter: Julie Koesmarno [@MsSQLGirl](http://twiter.com/MsSQLGirl) \ 6 | Jan 15, 2021 7 | 8 | 9 | ![](./media/Notebooks101ForSQLPeopleSQLSat1015-thumb.png) 10 | ## Abstract 11 | _Are you a database developer, a DBA or a data analyst? Do you find spending quite a bit of time trying to reproduce analysis or reproduce issues and the troubleshooting techniques? This Notebooks 101 session is for you!_ 12 | 13 | _The lack of rigor in being able to reproduce analysis in business context or to reproduce data troubleshooting can lead to confusion and time wasted on work that had been previously done. With Notebooks, Data Professionals can share their techniques and data sources used for the data analysis or troubleshooting for code review and reproducible insights/troubleshooting._ 14 | 15 | _Azure Data Studio Notebooks support SQL and KQL natively with intellisense; with easy to use charting capabilities. With Notebooks and Jupyter Book (collection of Notebooks) in Azure Data Studio, it’s also easy now for your team to build an onboarding guide (user manual) / documentation on data sources, useful sample scripts and tutorials._ 16 | 17 | ## Resources used at SQL Saturday #1015 Vienna: 18 | 1. [Slide Deck](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Notebooks%20Presentations/SQL%20Saturday%201015%20Vienna/20210115%20-%20Notebooks%20101%20for%20SQL%20People.pptx) 19 | 2. Download the latest Azure Data Studio at http://aka.ms/getAzureDataStudio 20 | > [Azure Data Studio](http://aka.ms/AzureDataStudio) is a cross platform (Windows, macOS and Linux) client tool for modern data experiences. In this presentation, I specifically used Azure Data Studio for all the demos with occassional GitHub Jupyter Notebook viewer. 21 | 3. 
Notebook Demo: 22 | - [Data Analysis with SQL Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/WWIReproducibleResearch%20Vol%201.ipynb) 23 | - [Data Analysis with Python Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/ReproducibleResearch.ipynb) 24 | - [Convert SQL scripts / PowerShell scripts to Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Useful%20Notebooks/DemoConvertToNotebooks.ipynb) 25 | 4. Remote Jupyter Book sample that you can use to download to Azure Data Studio directly: https://github.com/MsSQLGirl/jubilant-data-wizards/releases/tag/v0.0.0 26 | 5. Tiger Toolbox Jupyter Notebooks: https://github.com/microsoft/tigertoolbox/releases 27 | 28 | ## Other Jupyter Notebooks built by the Data community 29 | 1. Glenn Berry’s Diagnostic Notebooks for SQL Server and Azure SQL DB: https://glennsqlperformance.com/resources/ 30 | 2. Rob Sewell's Notebooks (PowerShell and .net interactive): http://sqldbawithabeard.com and https://github.com/SQLDBAWithABeard/JupyterNotebooks 31 | 3. Emanuele Meazzo's SQL Diagnostic Jupyter book: https://tsql.tech/the-sql-diagnostic-jupyter-book/ 32 | 33 | ## Other useful resources: 34 | 1. [How to use Jupyter Notebooks in Azure Data Studio | Azure Friday](https://www.youtube.com/watch?v=pHuRj9ty9cI) 35 | 2. [PowerShellNotebook](https://github.com/dfinke/PowerShellNotebook) module created by Doug Finke 36 | 3. [WideWorldImporters sample database to restore to your SQL Server / Azure SQL DB](https://github.com/Microsoft/sql-server-samples/releases/tag/wide-world-importers-v1.0) 37 | 4. [.net interactive notebooks in Azure Data Studio](https://channel9.msdn.com/Shows/Data-Exposed/Jupyter-Launch-NET-Interactive-Notebooks--Data-Exposed-MVP-Edition) with Rob Sewell and Anna Hoffman at Data Exposed. 38 | 5. [Notebook Templates for Troubleshooting Guide, Data Analysis and HowTo tutorials](https://github.com/MsSQLGirl/jubilant-data-wizards/tree/main/Notebook%20Templates) 39 | 6. [Request new features or report an issue on Azure Data Studio GitHub](https://github.com/microsoft/azuredatastudio/issues) 40 | 41 | ## FAQ 42 | 1. Does SQL Notebooks work only for SQL Server? 43 | It works for SQL Server, Azure SQL DB + Managed Instance and PostgreSQL. See [PostgreSQL extension in Azure Data Studio documentation](https://docs.microsoft.com/en-us/sql/azure-data-studio/extensions/postgres-extension?view=sql-server-ver15). 44 | 45 | 2. Does the resultset support spatial data type? 46 | Not currently. Please vote [Spatial support feature request](https://github.com/microsoft/azuredatastudio/issues/267) 47 | 48 | 3. Is it possible to export result set of notebooks to Excel or PDF 49 | In Azure Data Studio, there are a few buttons on top of the result set that you can use to export result set to another file format. There is no native support for exporting to PDF in Azure Data Studio today - however, check out [How to convert Jupyter notebooks into PDF](https://towardsdatascience.com/how-to-convert-jupyter-notebooks-into-pdf-5accaef3758). 50 | 51 | 4. Using the SQL Notebook, is it possible to join to many other data sources (PostgreSQL and SQL Server)? 52 | No. Currently, this would require Python. (Unless you have [EXTERNAL DATA SOURCE](https://docs.microsoft.com/sql/t-sql/statements/create-external-data-source-transact-sql) defined in SQL Server). 53 | 54 | 5. What's the best way to share notebooks? 
55 | - A great way to start is using source control, such as Git, which is supported natively in Azure Data Studio. 56 | - Another way is to ship it as a remote Jupyter Book on GitHub. 57 | - Create an extension to package your Jupyter Book, like the [SQL Server Diagnostic Book as an Extension](https://github.com/EmanueleMeazzo/tsql.tech-Code-snippets/releases/tag/v1.0). 58 | 59 | 6. How do I keep up with Azure Data Studio releases? 60 | Check out the [Azure Data Studio release blog posts](https://cloudblogs.microsoft.com/sqlserver/?product=azure-data-studio) or the [Release Notes](https://docs.microsoft.com/sql/azure-data-studio/release-notes-azure-data-studio). Also follow [@AzureDataStudio](http://twitter.com/AzureDataStudio) on Twitter. 61 | 62 | A shout-out to my PM colleagues, [Alan Yu](https://twitter.com/AlanYuSQL) and [Drew Skwiers-Koballa](https://twitter.com/SysAdminDrew), who are also active on Twitter with tips & tricks, including new releases of Azure Data Studio. 63 | 64 | 7. Where should I submit feature requests or report issues? 65 | http://aka.ms/askAzureDataStudio, or follow us on Twitter: [@AzureDataStudio](http://twitter.com/AzureDataStudio). 66 | -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1015 Vienna/SQLSatVienna - Petrinja, Sisak and Glina.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 1015 Vienna/SQLSatVienna - Petrinja, Sisak and Glina.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1015 Vienna/media/Notebooks101ForSQLPeopleSQLSat1015-thumb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 1015 Vienna/media/Notebooks101ForSQLPeopleSQLSat1015-thumb.png -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1019 Singapore/20201128 - Kusto Query Language (KQL) in Azure Data Studio.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 1019 Singapore/20201128 - Kusto Query Language (KQL) in Azure Data Studio.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1019 Singapore/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to Kusto Query Language (KQL) in Azure Data Studio 2 | ## SQL Saturday #1019 Singapore (virtual) 3 | https://www.sqlsaturday.com/1019/ 4 | 5 | Presenter: Julie Koesmarno [@MsSQLGirl](http://twitter.com/MsSQLGirl) \ 6 | Nov 28, 2020 7 | 8 | 9 | ![](./media/KQLinADSForSQLSat1019.png) 10 | ## Abstract 11 | _Join this session to learn more about the Kusto Query Language (KQL) experiences in Azure Data Studio. This is a quick overview session on how to get started with KQL in Azure Data Studio. You'll learn how to connect to Azure Data Explorer and Azure Log Analytics in notebooks. 
In this demo-filled session, you will also learn a practical example of analyzing Azure SQL database logs stored in an Azure Log Analytics workspace._ 12 | 13 | ## Resources used at SQL Saturday #1019 Singapore: 14 | 1. [Slide Deck](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Notebooks%20Presentations/SQL%20Saturday%201019%20Singapore/20201128%20-%20Kusto%20Query%20Language%20(KQL)%20in%20Azure%20Data%20Studio.pptx) 15 | 2. [Notebook Templates](https://github.com/MsSQLGirl/jubilant-data-wizards/tree/main/Notebook%20Templates) 16 | 3. Native Kusto demo: 17 | - [Native Kusto - Aggregates in KQL](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/NativeKusto-AggregatesInKusto.kql) 18 | - [Native Kusto - Simple Kusto Demo](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/NativeKusto-SimpleKustoNotebook.ipynb) 19 | - [Cross cluster querying with Kusto Notebook](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/KQL%20Notebooks/Demo-NativeKusto-CrossClusterQuery.ipynb) 20 | 4. Kqlmagic demo: 21 | - [Kqlmagic - Log Analytics](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/KQL%20Notebooks/Demo-KqlmagicLogAnalyticsWithUserAuth.ipynb) 22 | - [Analyzing Log Analytics Data with Client Secret auth](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/KQL%20Notebooks/Demo-KqlmagicLogAnalyticsWithClientSecret.ipynb) 23 | - [Parameterized with Kqlmagic Notebook with Papermill](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/KQL%20Notebooks/Demo-ExecuteParameterizedNotebookKqlmagicLogAnalyticsWithClientSecret.ipynb) 24 | 25 | 26 | ## Other useful resources: 27 | 1. Azure Data Studio: http://aka.ms/getAzureDataStudio 28 | 2. [Using Kusto Query Language in Azure Data Studio at Data Exposed, Channel 9](https://www.youtube.com/watch?v=Da1WfrLRl9s) 29 | 3. [Azure Data Studio Notebooks Power Hour at DPS2020, 3 Dec 2020](https://dataplatformgeeks.com/dps2020/session/azure-data-studio-notebooks-power-hour/) 30 | 4. [Analyze data in Azure Data Explorer using Kusto Query Language (KQL) extension in Azure Data Studio](https://cloudblogs.microsoft.com/sqlserver/2020/09/24/analyze-data-in-azure-data-explorer-using-kusto-query-language-kql-extension-in-azure-data-studio/) 31 | Sample files: https://github.com/MsSQLGirl/jubilant-data-wizards/tree/main/Simple%20Demo/KQL%20Notebooks 32 | 5. [Learn KQL at Pluralsight for Free](https://www.pluralsight.com/courses/kusto-query-language-kql-from-scratch) 33 | 34 | 35 | 36 | ## FAQ 37 | 38 | **1. Can you connect to Log Analytics in Azure Data Studio?** 39 | 40 | Yes. In Native Kusto, the current workaround is through Azure Data Explorer Proxy, i.e. connect to the help.kusto.windows.net cluster, then qualify the Log Analytics table with the `cluster().database()` path. 41 | For example: 42 | Querying the top 10 rows of the AzureDiagnostics table in a Log Analytics workspace: 43 | ``` 44 | cluster('https://ade.loganalytics.io/subscriptions/<Subscription ID>/resourcegroups/<Resource Group>/providers/microsoft.operationalinsights/workspaces/<Workspace Name>').database('<Workspace Name>').AzureDiagnostics | take 10 45 | ``` 46 | 47 | Using Kqlmagic in a notebook, simply use the following: 48 | The Workspace ID is the GUID of the Log Analytics workspace as specified on the Azure Portal. 
49 | ``` 50 | %kql loganalytics://code;workspace='<Workspace ID>' 51 | ``` 52 | 53 | The Workspace ID is the GUID of the Log Analytics workspace as specified on the Azure Portal. Specify the Tenant ID in case you are working with multiple tenants / Azure accounts. 54 | ``` 55 | %kql loganalytics://tenant=<Tenant ID>;workspace='<Workspace ID>' 56 | ``` 57 | 58 | 59 | **2. Does the result set support spatial data types?** 60 | 61 | Not currently. Please vote for the [Spatial support feature request](https://github.com/microsoft/azuredatastudio/issues/267). 62 | 63 | **3. How do I enable auditing for Azure SQL Database and Azure Synapse Analytics?** 64 | 65 | Through the Azure Portal, as per [Auditing for Azure SQL Database and Azure Synapse Analytics](https://docs.microsoft.com/en-us/azure/azure-sql/database/auditing-overview). 66 | For Azure SQL's diagnostics, see [Configure streaming export of Azure SQL Database and SQL Managed Instance diagnostic telemetry](https://docs.microsoft.com/en-us/azure/azure-sql/database/metrics-diagnostic-telemetry-logging-streaming-export-configure?tabs=azure-portal). 67 | 68 | 69 | **4. Are there any good examples of queries on Log Analytics to understand what's going on with my Azure SQL DB?** 70 | 71 | As of 11/27, I don't have anything handy. I'll find out :) 72 | 73 | To get started, I recommend: 74 | * [Useful queries by the Azure Monitor community on GitHub](https://github.com/microsoft/AzureMonitorCommunity/tree/master/Azure%20Services/SQL%20databases/Queries) 75 | * [Azure SQL DB and Log Analytics better together series](https://techcommunity.microsoft.com/t5/azure-database-support-blog/azure-sql-db-and-log-analytics-better-together-part-1/ba-p/794833#:~:text=%20AZURE%20SQL%20DB%20AND%20LOG%20ANALYTICS%20BETTER,the%20database%20using%20a%20language%20called...%20More%20) 76 | 77 | **5. Is the `render` syntax supported in Azure Data Studio?** 78 | * Native Kusto - not yet. Please request this feature [here](https://github.com/microsoft/azuredatastudio/issues). 79 | * Kqlmagic - yes, it is integrated with Plotly in Notebooks in Azure Data Studio. 80 | -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 1019 Singapore/media/KQLinADSForSQLSat1019.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 1019 Singapore/media/KQLinADSForSQLSat1019.png -------------------------------------------------------------------------------- /Notebooks Presentations/SQL Saturday 997 Salt Lake City/20200829 - Notebooks 101 for SQL People.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/SQL Saturday 997 Salt Lake City/20200829 - Notebooks 101 for SQL People.pptx -------------------------------------------------------------------------------- /Notebooks Presentations/Scottish Summit 2021/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to Notebooks 101 for SQL People 2 | ## Scottish Summit 2021 edition 3 | 4 | Recording: https://www.youtube.com/watch?v=80L-UTOlknw 5 | 6 | Useful References: 7 | 1. http://aka.ms/getAzureDataStudio to download Azure Data Studio 8 | 2. [Notebook Templates](https://github.com/MsSQLGirl/jubilant-data-wizards/tree/main/Notebook%20Templates) 9 | 3. 
Notebook Demo: 10 | - [Data Analysis with SQL Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/WWIReproducibleResearch%20Vol%201.ipynb) 11 | - [Data Analysis with Python Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/ReproducibleResearch.ipynb) 12 | - [Convert SQL scripts / PowerShell scripts to Notebooks](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Useful%20Notebooks/DemoConvertToNotebooks.ipynb) 13 | 4. Remote Jupyter Book sample that you can download to Azure Data Studio directly: https://github.com/MsSQLGirl/jubilant-data-wizards/releases/tag/v0.0.0 14 | 15 | ## Other useful resources: 16 | 1. [How to use Jupyter Notebooks in Azure Data Studio | Azure Friday](https://www.youtube.com/watch?v=pHuRj9ty9cI) 17 | 2. [PowerShellNotebook](https://github.com/dfinke/PowerShellNotebook) 18 | 3. [Azure Data Studio Blog](https://cloudblogs.microsoft.com/sqlserver/?product=azure-data-studio) 19 | 20 | ## Other Notebooks from / for the SQL community 21 | 1. [Tiger Toolbox: SQL Server Troubleshooting Notebooks](https://github.com/microsoft/tigertoolbox/tree/master/Troubleshooting-Notebooks) 22 | 2. [Rob Sewell's Jupyter Notebooks (for DBAs)](https://github.com/SQLDBAWithABeard/JupyterNotebooks) 23 | 3. [Emanuele Meazzo's SQL Server Diagnostic Notebooks](https://tsql.tech/sql-server-diagnostic-notebook-updated/) 24 | -------------------------------------------------------------------------------- /Notebooks Presentations/Techorama 2021/From Oops to Ops Incident Response with Notebooks.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/Techorama 2021/From Oops to Ops Incident Response with Notebooks.pdf -------------------------------------------------------------------------------- /Notebooks Presentations/Techorama 2021/readme.md: -------------------------------------------------------------------------------- 1 | # Welcome to **From Oops to Ops: Incident Management with Notebooks** 2 | 3 | Register / schedule: https://techorama.be/agenda?day=tuesday. 4 | Tuesday, May 18, 2021 at 16:00 CET. 5 | 6 | Speakers: 7 | - Julie Koesmarno - http://mssqlgirl.com | http://linkedin.com/in/juliekoesmarno | [@MsSQLGirl](http://twitter.com/mssqlgirl) 8 | - Shafiq Rahman - https://www.linkedin.com/in/shafiq-rahman-16853a5/ 9 | 10 | ## Abstract 11 | What if you could apply software engineering practices to your troubleshooting guides (TSGs) / playbook / runbook for your team’s on-call? 12 | What if you could reduce stress and mistakes in your incident response workflow? 13 | 14 | Join this session to learn more about the TSG Ops framework, where we revolutionize how Database Administrators and Data Engineers can be more effective 15 | and feel empowered during incident response. We will share some of our learnings and our journey on implementing TSG Ops, which is based on Jupyter Notebooks. 16 | 17 | We’ll show how TSG Ops in Azure Data Studio makes incident response for SQL Server (on-prem) and Azure SQL much easier. We’ll showcase how you can use 18 | Notebooks with your favorite languages, from SQL to KQL, from PowerShell to Python, and all the way to .net interactive, providing you the most flexibility. 19 | 20 | ## Useful pre-reads: 21 | 1. Azure Data Studio. 
22 | - Download from http://aka.ms/getAzureDataStudio. 23 | 2. Basic knowledge on Notebooks: 24 | - [Notebooks 101 for SQL people video tutorial](https://www.youtube.com/watch?v=80L-UTOlknw) from Scottish Summit channel, or 25 | - [Jupyter Notebooks for the mere mortals video tutorial](https://www.youtube.com/watch?v=-akGNOsaMg0) from Tech(K)now Day channel. 26 | 3. PowerShellNotebook module. 27 | - [Invoke-PowerShellNotebook](https://github.com/dfinke/PowerShellNotebook). 28 | - Related video: https://www.youtube.com/watch?v=3b_LQn18oHI. 29 | 4. SqlServer module. 30 | - [Invoke-SqlNotebook](https://docs.microsoft.com/powershell/module/sqlserver/invoke-sqlnotebook) 31 | 32 | ## Topics covered: 33 | 1. Intro to TSG and Notebooks. 34 | Our studies in Incident Response scenarios show that DBAs / Data Professionals still use static documents (Word, OneNote, Wiki) as Knowledge Base or troubleshooting guides. While traditionally this is OK, as data estate and ecosystems become more complex, we think that treating troubleshooting guides as code will help with maintainability and scalability. Azure Data Studio is continuously pushing for improvements in Notebooks that are ideal for supportability stories. 35 | 2. Executable TSGs with Notebooks. 36 | - Parameterization with Papermill. 37 | See examples: [Parameterizing notebooks to investigate Azure SQL's log stored in Azure Log Analytics](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Parameterization/SQLDBLog/Demo-ExecuteAzureSQLLogAnalytics.ipynb) 38 | Related blog post: https://www.mssqlgirl.com/executing-a-parameterized-notebook-in-azure-data-studio/. 39 | - Official documentation: [Create a parameterized notebook](https://docs.microsoft.com/sql/azure-data-studio/notebooks/notebooks-parameterization). 40 | 3. Jumpstart your Notebooks. 41 | Watch the demo video previously presented at Scottish Summit - Notebooks 101 for SQL People starting at minute [27:07](https://youtu.be/80L-UTOlknw?t=1627). 42 | - Use Azure Data Studio UI to convert one SQL script at a time. 43 | Related blog post: https://www.mssqlgirl.com/inajiffy-converting-from-sql-scripts-to-notebook/ 44 | - Use PowerShellNotebook module for bulk imports, specifically these cmdlets: 45 | - `ConvertTo-PowerShellNotebook` to convert PowerShell scripts to Notebooks with PowerShell kernel. 46 | - `ConvertTo-SQLNotebook` to convert SQL scripts to Notebooks with SQL kernel. 47 | 4. Automatable TSGs with Notebooks. 48 | There are three automation categories: 49 | 1. Automate the Diagnostics / Root Cause Analysis (RCA). 50 | 2. Automate incident filing. 51 | 3. Automate mitigation steps. 52 | 53 | A flow example of the Automate RCA: 54 | - Use Power Automate or Azure Logic Apps to automate the flow, i.e. to automatically trigger when an email is received or if a new task is assigned to you, to send an email after a job is completed with diagnostic or mitigation results. 55 | - Use Azure Automation account and jobs that gets called by Power Automate or Azure Logic Apps, to run a PowerShell script that invokes a notebook (either SQL or PowerShell kernel based). 56 | - Use a Jupyter Notebook with SQL / PowerShell kernel that does a number of diagnostic queries (and mitigation if applicable). 57 | 5. Takeaways and other resources 58 | 59 | 60 | ## Examples of SQL related notebooks 61 | 1. 
Tiger Toolbox: 62 | - SQL Assessment Toolkit: https://github.com/microsoft/sql-server-samples/tree/master/samples/manage/sql-assessment-api/notebooks 63 | - SQL Hybrid Toolkit: https://github.com/microsoft/tigertoolbox/tree/master/SQL-Hybrid-Cloud-Toolkit/content 64 | 2. Glenn Berry’s Diagnostic Notebooks for SQL Server and Azure SQL: https://glennsqlperformance.com/resources/ 65 | 3. Rob Sewell’s Notebooks: http://sqldbawithabeard.com and https://github.com/SQLDBAWithABeard/JupyterNotebooks 66 | 4. The SQL Diagnostic (Jupyter) Book by Emanuele Meazzo: https://tsql.tech/the-sql-diagnostic-jupyter-book/ 67 | 68 | ## Other useful resources 69 | Submit your issues on Azure Data Studio: https://github.com/microsoft/azuredatastudio/issues 70 | -------------------------------------------------------------------------------- /Notebooks Presentations/User Groups/DatabaseProfessionalsVirtualMeetupGroup-20210728.md: -------------------------------------------------------------------------------- 1 | # Database Professionals Virtual Meetup Group, Jul 28, 2021 2 | 3 | ## Notebooks 101 for SQL People 4 | 5 | Speakers: Julie Koesmarno ([@MsSQLGirl](http://twitter.com/MsSQLGirl)) & [Barbara Valdez](https://twitter.com/BarbaraCodes). 6 | 7 | Event website: https://www.meetup.com/dbavug/events/277925201/ 8 | 9 | Slide deck: [Notebooks 101 for SQL People](./Notebooks%20101%20for%20SQL%20People%20-%20Barbara%20&%20Julie.pdf) 10 | 11 | Blog post: [MsSQLGirl.com](http://mssqlgirl.com) 12 | 13 | Go to http://aka.ms/AzureDataStudio to learn more about Azure Data Studio, or http://aka.ms/getAzureDataStudio to download Azure Data Studio. 14 | 15 | Demo References: 16 | 1. [Data Analysis Template](../NotebookTemplates/DataAnalysisTemplate.ipynb) 17 | 2. [Troubleshooting Guide Template](../NotebookTemplates/TroubleshootingGuideTemplate.ipynb) 18 | 3. [Tutorial / How-To Template](../NotebookTemplates/TutorialTemplate.ipynb) 19 | 4. [SQL Server Diagnostics Book](https://github.com/EmanueleMeazzo/SQL-Server-Diagnostic-Book) by Emanuele Meazzo 20 | 21 | Sample Notebooks: 22 | 1. [Data Analysis with Wide World Importers dataset using Notebook with SQL Kernel](Simple%20Demo\Sample%20Notebooks%20-%20Data%20Analysis\WWIReproducibleResearch%20Vol%201.ipynb) 23 | 2. [Sample research with Python](Simple%20Demo\Sample%20Notebooks%20-%20Data%20Analysis\ReproducibleResearch.ipynb) -------------------------------------------------------------------------------- /Notebooks Presentations/User Groups/Notebooks 101 for SQL People - Barbara & Julie.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Notebooks Presentations/User Groups/Notebooks 101 for SQL People - Barbara & Julie.pdf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Jubilant Data Wizards 2 | 3 | A collection of sample scripts from Azure Data Studio (between 2019 and 2021) and Dataverse presentations (from 2021 onwards). 
4 | -------------------------------------------------------------------------------- /Simple Demo/KQL Notebooks/Demo-ExecuteParameterizedNotebookKqlmagicLogAnalyticsWithClientSecret.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "python3", 5 | "display_name": "Python 3", 6 | "language": "python" 7 | }, 8 | "language_info": { 9 | "name": "python", 10 | "version": "3.6.6", 11 | "mimetype": "text/x-python", 12 | "codemirror_mode": { 13 | "name": "ipython", 14 | "version": 3 15 | }, 16 | "pygments_lexer": "ipython3", 17 | "nbconvert_exporter": "python", 18 | "file_extension": ".py" 19 | } 20 | }, 21 | "nbformat_minor": 2, 22 | "nbformat": 4, 23 | "cells": [ 24 | { 25 | "cell_type": "markdown", 26 | "source": [ 27 | "# Demo: Execute a notebook using Papermill\r\n", 28 | "\r\n", 29 | "- Author: Julie Koesmarno ([@MsSQLGirl](http://twitter.com/MsSQLgirl))\r\n", 30 | "- Last updated: Nov 8, 2020\r\n", 31 | "\r\n", 32 | "This notebook provides an example of executing a **parameterized** notebook.\r\n", 33 | "\r\n", 34 | "## What you need to provide\r\n", 35 | "1. inputNotebook - this is the local path to the parameterized notebook that you want to execute. \r\n", 36 | "In the example below, I'm executing [Demo-KqlmagicLogAnalyticsWithClientSecret.ipynb](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/KQL%20Notebooks/Demo-KqlmagicLogAnalyticsWithClientSecret.ipynb) that is downloaded to my local path. \r\n", 37 | "2. outputNotebook - this is the local path to the output of the notebook once executed. \r\n", 38 | "3. parameters to the inputNotebook. \r\n", 39 | " For more info about what the parameters mean, please go to [Demo-KqlmagicLogAnalyticsWithClientSecret.ipynb](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/KQL%20Notebooks/Demo-KqlmagicLogAnalyticsWithClientSecret.ipynb).\r\n", 40 | "\r\n", 41 | "## Pre-requisites\r\n", 42 | "1. Azure Data Studio v1.24.0 November release. http://aka.ms/getAzureDataStudio.\r\n", 43 | "2. Papermill package installed on your Python environment for Azure Data Studio. \r\n", 44 | "3. 
Kqlmagic package installed on your Python environment for Azure Data Studio.\r\n", 45 | "\r\n", 46 | "## Known Issues\r\n", 47 | "* [Parameterized notebook errors on a line break after %%kql which is supposed to work](https://github.com/microsoft/azuredatastudio/issues/13305)\r\n", 48 | "* [Parameterized notebook reorders column values when executed](https://github.com/microsoft/azuredatastudio/issues/13304)\r\n", 49 | "\r\n", 50 | "\r\n", 51 | "\r\n", 52 | "\r\n", 53 | "\r\n", 54 | "\r\n", 55 | "\r\n", 56 | "\r\n", 57 | "\r\n", 58 | "\r\n", 59 | "" 60 | ], 61 | "metadata": { 62 | "azdata_cell_guid": "510bb4d4-e013-4fd4-b894-c3dc15a59dd1" 63 | } 64 | }, 65 | { 66 | "cell_type": "code", 67 | "source": [ 68 | "import papermill as pm\r\n", 69 | "\r\n", 70 | "inputNotebook = 'C:/Temp/KqlmagicDemo/Demo-KqlmagicLogAnalyticsWithClientSecret.ipynb'\r\n", 71 | "outputNotebook = 'c:/Temp/KqlmagicDemo/Output-Demo-KqlmagicLogAnalytics-ClientSecret.ipynb'\r\n", 72 | "\r\n", 73 | "silent = pm.execute_notebook(\r\n", 74 | " inputNotebook,\r\n", 75 | " outputNotebook,\r\n", 76 | " parameters = dict(laConnFile = 'c:/Temp/myLAConnection.txt', showClearText = 'N', laAlias = 'myLAConnection', logicalServerName = 'mySQLDBServerName')\r\n", 77 | ")" 78 | ], 79 | "metadata": { 80 | "azdata_cell_guid": "6af8603a-c534-45bb-80d5-3cd1f742a2e7", 81 | "tags": [] 82 | }, 83 | "outputs": [ 84 | { 85 | "name": "stderr", 86 | "text": "\rExecuting: 0%| | 0/20 [00:00 2 | clientId: 3 | clientSecret: 4 | workspaceId: -------------------------------------------------------------------------------- /Simple Demo/KQL Notebooks/Demo-NativeKusto-CrossServiceQuery.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "kusto", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Cross-service querying with Kusto Notebook in Azure Data Studio\n", 20 | "\n", 21 | "This notebook shows how to query Log Analytics workspace from Azure Data Studio Kusto notebook. \n", 22 | "\n", 23 | "> **Tip**: Ensure that you connect to the ADX cluster with the AAD account that has access to Log Analytics workspace also. \n", 24 | "" 25 | ], 26 | "metadata": { 27 | "azdata_cell_guid": "9001d8c9-6602-4938-a50e-41cfa3598f3a" 28 | } 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "source": [ 33 | "First, change the Kernel to **Kusto** and Attach to **help.kusto.windows.net** and set the database to **Samples**. Run the following query to ensure that it works. " 34 | ], 35 | "metadata": { 36 | "azdata_cell_guid": "4a911ec4-318d-46be-a3b5-7617def51e64" 37 | } 38 | }, 39 | { 40 | "cell_type": "code", 41 | "source": [ 42 | "StormEvents \r\n", 43 | "| take 10" 44 | ], 45 | "metadata": { 46 | "azdata_cell_guid": "c68f8fe6-8df4-4a21-a849-73f71ff1fde1" 47 | }, 48 | "outputs": [], 49 | "execution_count": null 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "source": [ 54 | "Now, we are ready to start querying Log Analytics workspace. 
For the example below, I'm following the cross-service syntax to connect to Log Analytics.\r\n", 55 | "\r\n", 56 | "```\r\n", 57 | "cluster(https://ade.loganalytics.io/subscriptions//resourcegroups//providers/microsoft.operationalinsights/workspaces/').database(')\r\n", 58 | "```\r\n", 59 | "\r\n", 60 | "You'll need to replace: \r\n", 61 | "* `` with your Subscription ID where your Log Analytics workspace live. e.g. `88a1234b-6cc7-1234-a015-a123bc123456` \r\n", 62 | "* `` with the Resource Group where your Log Analytics workspace live. e.g `myresourcegroup` \r\n", 63 | "* `` with the name of your Log Analytics workspace. e.g. `mssqlgirlla`\r\n", 64 | "\r\n", 65 | "For full list of syntax including for Azure AppInsights, see [Additional Syntax examples](https://docs.microsoft.com/en-us/azure/data-explorer/query-monitor-data#additional-syntax-examples)." 66 | ], 67 | "metadata": { 68 | "azdata_cell_guid": "717e20fb-0814-41e2-9fb3-5f7fb0fb6eb9" 69 | } 70 | }, 71 | { 72 | "cell_type": "code", 73 | "source": [ 74 | "cluster('https://ade.loganalytics.io/subscriptions/88a1234b-6cc7-1234-a015-a123bc123456/resourcegroups/myresourcegroup/providers/microsoft.operationalinsights/workspaces/mssqlgirlla').database('mssqlgirlla').AzureDiagnostics | take 10\r\n", 75 | "| summarize count() by action_name_s, succeeded_s\r\n", 76 | "| take 10" 77 | ], 78 | "metadata": { 79 | "azdata_cell_guid": "d728df32-98bd-49e6-8853-752ebe6f16b6", 80 | "tags": [] 81 | }, 82 | "outputs": [ 83 | { 84 | "output_type": "display_data", 85 | "data": { 86 | "text/html": "Commands completed successfully." 87 | }, 88 | "metadata": {} 89 | }, 90 | { 91 | "output_type": "display_data", 92 | "data": { 93 | "text/html": "Total execution time: 00:00:03.908" 94 | }, 95 | "metadata": {} 96 | }, 97 | { 98 | "output_type": "execute_result", 99 | "metadata": { 100 | "resultSet": { 101 | "id": 0, 102 | "batchId": 0, 103 | "rowCount": 5, 104 | "complete": false, 105 | "columnInfo": [ 106 | { 107 | "isBytes": false, 108 | "isChars": false, 109 | "isSqlVariant": false, 110 | "isUdt": false, 111 | "isXml": false, 112 | "isJson": false, 113 | "sqlDbType": 0, 114 | "isHierarchyId": false, 115 | "isUnknownType": false, 116 | "allowDBNull": true, 117 | "baseCatalogName": "NewDataSet", 118 | "baseColumnName": "action_name_s", 119 | "baseSchemaName": null, 120 | "baseServerName": null, 121 | "baseTableName": "Table_0", 122 | "columnName": "action_name_s", 123 | "columnOrdinal": 0, 124 | "columnSize": -1, 125 | "isAliased": null, 126 | "isAutoIncrement": false, 127 | "isExpression": null, 128 | "isHidden": null, 129 | "isIdentity": null, 130 | "isKey": false, 131 | "isLong": false, 132 | "isReadOnly": false, 133 | "isUnique": false, 134 | "numericPrecision": null, 135 | "numericScale": null, 136 | "udtAssemblyQualifiedName": null, 137 | "dataType": "System.String, System.Private.CoreLib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", 138 | "dataTypeName": "dynamic" 139 | }, 140 | { 141 | "isBytes": false, 142 | "isChars": false, 143 | "isSqlVariant": false, 144 | "isUdt": false, 145 | "isXml": false, 146 | "isJson": false, 147 | "sqlDbType": 0, 148 | "isHierarchyId": false, 149 | "isUnknownType": false, 150 | "allowDBNull": true, 151 | "baseCatalogName": "NewDataSet", 152 | "baseColumnName": "succeeded_s", 153 | "baseSchemaName": null, 154 | "baseServerName": null, 155 | "baseTableName": "Table_0", 156 | "columnName": "succeeded_s", 157 | "columnOrdinal": 1, 158 | "columnSize": -1, 159 | "isAliased": null, 160 | "isAutoIncrement": 
false, 161 | "isExpression": null, 162 | "isHidden": null, 163 | "isIdentity": null, 164 | "isKey": false, 165 | "isLong": false, 166 | "isReadOnly": false, 167 | "isUnique": false, 168 | "numericPrecision": null, 169 | "numericScale": null, 170 | "udtAssemblyQualifiedName": null, 171 | "dataType": "System.String, System.Private.CoreLib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", 172 | "dataTypeName": "dynamic" 173 | }, 174 | { 175 | "isBytes": false, 176 | "isChars": false, 177 | "isSqlVariant": false, 178 | "isUdt": false, 179 | "isXml": false, 180 | "isJson": false, 181 | "sqlDbType": 0, 182 | "isHierarchyId": false, 183 | "isUnknownType": false, 184 | "allowDBNull": true, 185 | "baseCatalogName": "NewDataSet", 186 | "baseColumnName": "count_", 187 | "baseSchemaName": null, 188 | "baseServerName": null, 189 | "baseTableName": "Table_0", 190 | "columnName": "count_", 191 | "columnOrdinal": 2, 192 | "columnSize": -1, 193 | "isAliased": null, 194 | "isAutoIncrement": false, 195 | "isExpression": null, 196 | "isHidden": null, 197 | "isIdentity": null, 198 | "isKey": false, 199 | "isLong": false, 200 | "isReadOnly": false, 201 | "isUnique": false, 202 | "numericPrecision": null, 203 | "numericScale": null, 204 | "udtAssemblyQualifiedName": null, 205 | "dataType": "System.Int64, System.Private.CoreLib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", 206 | "dataTypeName": "dynamic" 207 | } 208 | ], 209 | "specialAction": null 210 | } 211 | }, 212 | "execution_count": 5, 213 | "data": { 214 | "application/vnd.dataresource+json": { 215 | "schema": { 216 | "fields": [ 217 | { 218 | "name": "action_name_s" 219 | }, 220 | { 221 | "name": "succeeded_s" 222 | }, 223 | { 224 | "name": "count_" 225 | } 226 | ] 227 | }, 228 | "data": [ 229 | { 230 | "0": "AUDIT SESSION CHANGED", 231 | "1": "true", 232 | "2": "53" 233 | }, 234 | { 235 | "0": "DATABASE AUTHENTICATION SUCCEEDED", 236 | "1": "true", 237 | "2": "59" 238 | }, 239 | { 240 | "0": "BATCH COMPLETED", 241 | "1": "true", 242 | "2": "279" 243 | }, 244 | { 245 | "0": "BATCH COMPLETED", 246 | "1": "false", 247 | "2": "25" 248 | }, 249 | { 250 | "0": "RPC COMPLETED", 251 | "1": "true", 252 | "2": "31" 253 | } 254 | ] 255 | }, 256 | "text/html": [ 257 | "", 258 | "", 259 | "", 260 | "", 261 | "", 262 | "", 263 | "", 264 | "
action_name_ssucceeded_scount_
AUDIT SESSION CHANGEDtrue53
DATABASE AUTHENTICATION SUCCEEDEDtrue59
BATCH COMPLETEDtrue279
BATCH COMPLETEDfalse25
RPC COMPLETEDtrue31
" 265 | ] 266 | } 267 | } 268 | ], 269 | "execution_count": 5 270 | } 271 | ] 272 | } -------------------------------------------------------------------------------- /Simple Demo/KQL Notebooks/Demo-ParameterizedKqlmagicLogAnalytics.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "python3", 5 | "display_name": "Python 3", 6 | "language": "python" 7 | }, 8 | "language_info": { 9 | "name": "python", 10 | "version": "3.6.6", 11 | "mimetype": "text/x-python", 12 | "codemirror_mode": { 13 | "name": "ipython", 14 | "version": 3 15 | }, 16 | "pygments_lexer": "ipython3", 17 | "nbconvert_exporter": "python", 18 | "file_extension": ".py" 19 | } 20 | }, 21 | "nbformat_minor": 2, 22 | "nbformat": 4, 23 | "cells": [ 24 | { 25 | "cell_type": "markdown", 26 | "source": [ 27 | "# Test Parameterized Log Analytics notebook\n", 28 | "\n", 29 | "Users need to provide the Log Analytics workspace name and the App Key to be able to connect.\n", 30 | "\n", 31 | "This example specifically analyzes Kubernetes Nodes.\n", 32 | "\n", 33 | "These are the paramters that you can use:\n", 34 | "\n", 35 | "1. workspace='DEMO\\_WORKSPACE'\n", 36 | "2. appkey='DEMO\\_KEY'\n", 37 | "\n", 38 | "This notebook queries data and uses the render operator to visualize it. The visualization uses the ploy.ly library in Python. \n", 39 | "\n", 40 | "> Kqlmagic is a command that extends the capabilities of the Python kernel in Azure Data Studio notebooks. You can combine Python and Kusto query language (KQL) to query and visualize data using rich Plot.ly library integrated with render commands. Kqlmagic brings you the benefit of notebooks, data analysis, and rich Python capabilities all in the same location. Supported data sources with Kqlmagic include Azure Data Explorer, Application Insights, and Azure Monitor logs.\n", 41 | "\n", 42 | " \n", 43 | "\n", 44 | "Prerequisites:\n", 45 | "\n", 46 | "1. Azure Data Studio \n", 47 | "2. Python \n", 48 | "3. 
Install and set up Kqlmagic in a notebook \n", 49 | "\n", 50 | "More info: [Notebooks with Kqlmagic (Kusto Query Language) in Azure Data Studio - Azure Data Studio | Microsoft Docs](https://docs.microsoft.com/en-us/sql/azure-data-studio/notebooks/notebooks-kqlmagic?view=sql-server-ver15#kqlmagic-with-application-insights)" 51 | ], 52 | "metadata": { 53 | "azdata_cell_guid": "cd509d75-db79-43d9-a3b7-a6a8960c358b" 54 | } 55 | }, 56 | { 57 | "cell_type": "code", 58 | "source": [ 59 | "logAnalyticsWorkspace = ''\r\n", 60 | "logAnalyticsAppKey = ''" 61 | ], 62 | "metadata": { 63 | "azdata_cell_guid": "0796c087-d81e-4ad5-bbaf-c93afee7ac56", 64 | "tags": [ 65 | "parameters" 66 | ] 67 | }, 68 | "outputs": [], 69 | "execution_count": null 70 | }, 71 | { 72 | "cell_type": "code", 73 | "source": [ 74 | "%reload_ext Kqlmagic" 75 | ], 76 | "metadata": { 77 | "azdata_cell_guid": "abb045ac-dee8-4c58-b912-8e4f25bac4b2" 78 | }, 79 | "outputs": [], 80 | "execution_count": null 81 | }, 82 | { 83 | "cell_type": "markdown", 84 | "source": [ 85 | "Connect to Azure Log Analytics (Azure Monitor Log)" 86 | ], 87 | "metadata": { 88 | "azdata_cell_guid": "07579cb8-09e8-4ab8-90af-dbc74f767322" 89 | } 90 | }, 91 | { 92 | "cell_type": "code", 93 | "source": [ 94 | "%kql loganalytics://workspace=logAnalyticsWorkspace;appkey=logAnalyticsAppKey;alias='myworkspace'" 95 | ], 96 | "metadata": { 97 | "azdata_cell_guid": "d80ff325-0f0c-465d-b3ff-33181117d166" 98 | }, 99 | "outputs": [], 100 | "execution_count": null 101 | }, 102 | { 103 | "cell_type": "markdown", 104 | "source": [ 105 | "Analyze Kubernetes Nodes  by Status" 106 | ], 107 | "metadata": { 108 | "azdata_cell_guid": "e04e130b-d614-4e85-9f0e-8fc501ad3bf8" 109 | } 110 | }, 111 | { 112 | "cell_type": "code", 113 | "source": [ 114 | "%%kql\r\n", 115 | "KubeNodeInventory\r\n", 116 | "| summarize event_count=count() by Status, bin(TimeGenerated, 1d)\r\n", 117 | "| render timechart title= 'Daily Kubernetes Nodes'" 118 | ], 119 | "metadata": { 120 | "azdata_cell_guid": "6bb1dc83-0f23-4662-96de-c4be344ec2b7" 121 | }, 122 | "outputs": [], 123 | "execution_count": null 124 | } 125 | ] 126 | } -------------------------------------------------------------------------------- /Simple Demo/KQL Notebooks/TroubleshootAzCLILoginIssue.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Troubleshooting az-cli login issue\n", 20 | "\n", 21 | " \n", 22 | "\n", 23 | "## What does the error look like?\n", 24 | "\n", 25 | "```\n", 26 | "---------------------------------------------------------------------------\n", 27 | "ModuleNotFoundError Traceback (most recent call last)\n", 28 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\my_aad_helper.py in _get_azcli_token(self, subscription)\n", 29 | " 634 # from azure.cli.core._profile import _CLIENT_ID as AZCLI_CLIENT_ID\n", 30 | "--> 635 from azure.common.credentials import get_cli_profile\n", 31 | " 636 try:\n", 32 | " \n", 33 | "ModuleNotFoundError: No module named 'azure.common'\n", 34 | " \n", 35 | "During handling of the above exception, another exception occurred:\n", 36 | " \n", 37 | "TypeError Traceback (most recent call last)\n", 38 | 
"~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\my_aad_helper.py in acquire_token(self)\n", 39 | " 221 if self._options.get(\"try_azcli_login\"):\n", 40 | "--> 222 token = self._get_azcli_token()\n", 41 | " 223 self._current_token = self._validate_and_refresh_token(token)\n", 42 | " \n", 43 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\my_aad_helper.py in _get_azcli_token(self, subscription)\n", 44 | " 641 pass\n", 45 | "--> 642 except [ImportError, ModuleNotFoundError]:\n", 46 | " 643 raise AuthenticationError(\"Azure CLI authentication requires 'azure-cli-core' to be installed.\")\n", 47 | " \n", 48 | "TypeError: catching classes that do not inherit from BaseException is not allowed\n", 49 | " \n", 50 | "During handling of the above exception, another exception occurred:\n", 51 | " \n", 52 | "AuthenticationError Traceback (most recent call last)\n", 53 | " in \n", 54 | "----> 1 get_ipython().run_line_magic('kql', 'loganalytics://code;tenant=\"72f988bf-86f1-41af-91ab-2d7cd011db47\";workspace=\"c3de7517-0d7c-45df-90c1-a4e43fc03131\";alias=\"myLogAnalyticsWorkspace\" -try-azcli_login -!se')\n", 55 | " \n", 56 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\IPython\\core\\interactiveshell.py in run_line_magic(self, magic_name, line, _stack_depth)\n", 57 | " 2305 kwargs['local_ns'] = sys._getframe(stack_depth).f_locals\n", 58 | " 2306 with self.builtin_trap:\n", 59 | "-> 2307 result = fn(*args, **kwargs)\n", 60 | " 2308 return result\n", 61 | " 2309 \n", 62 | " \n", 63 | " in execute(self, line, cell, local_ns, override_vars, override_options, override_query_properties, override_connection, override_result_set)\n", 64 | " \n", 65 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\IPython\\core\\magic.py in (f, *a, **k)\n", 66 | " 185 # but it's overkill for just that one bit of state.\n", 67 | " 186 def magic_deco(arg):\n", 68 | "--> 187 call = lambda f, *a, **k: f(*a, **k)\n", 69 | " 188 \n", 70 | " 189 if callable(arg):\n", 71 | " \n", 72 | " in execute(self, line, cell, local_ns, override_vars, override_options, override_query_properties, override_connection, override_result_set)\n", 73 | " \n", 74 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\IPython\\core\\magic.py in (f, *a, **k)\n", 75 | " 185 # but it's overkill for just that one bit of state.\n", 76 | " 186 def magic_deco(arg):\n", 77 | "--> 187 call = lambda f, *a, **k: f(*a, **k)\n", 78 | " 188 \n", 79 | " 189 if callable(arg):\n", 80 | " \n", 81 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\kql_magic.py in execute(self, line, cell, local_ns, override_vars, override_options, override_query_properties, override_connection, override_result_set)\n", 82 | " 671 override_query_properties=override_query_properties,\n", 83 | " 672 override_connection=override_connection,\n", 84 | "--> 673 override_result_set=override_result_set)\n", 85 | " 674 \n", 86 | " 675 return result\n", 87 | " \n", 88 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\kql_magic_core.py in execute(self, line, cell, local_ns, override_vars, override_options, override_query_properties, override_connection, override_result_set)\n", 89 | " 679 \n", 90 | " 680 if command is None or command == \"submit\":\n", 91 | "--> 681 result = self._execute_query(parsed, user_ns, result_set=override_result_set, override_vars=override_vars)\n", 92 | " 682 if type(result) == ResultSet:\n", 93 | " 683 # can't just return result as is, it fails when used with table package 
pandas_show_schema\n", 94 | " \n", 95 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\kql_magic_core.py in _execute_query(self, parsed, user_ns, result_set, override_vars)\n", 96 | " 1102 return None\n", 97 | " 1103 else:\n", 98 | "-> 1104 raise e\n", 99 | " 1105 \n", 100 | " 1106 \n", 101 | " \n", 102 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\kql_magic_core.py in _execute_query(self, parsed, user_ns, result_set, override_vars)\n", 103 | " 929 retry_with_code = True\n", 104 | " 930 else:\n", 105 | "--> 931 raise e\n", 106 | " 932 \n", 107 | " 933 if retry_with_code:\n", 108 | " \n", 109 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\kql_magic_core.py in _execute_query(self, parsed, user_ns, result_set, override_vars)\n", 110 | " 921 retry_with_code = False\n", 111 | " 922 try:\n", 112 | "--> 923 conn.validate(**options)\n", 113 | " 924 conn.set_validation_result(True)\n", 114 | " 925 except Exception as e:\n", 115 | " \n", 116 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\kql_engine.py in validate(self, **options)\n", 117 | " 155 raise KqlEngineError(\"Client is not defined.\")\n", 118 | " 156 query = \"range c from 1 to 10 step 1 | count\"\n", 119 | "--> 157 response = client.execute(self.get_database(), query, accept_partial_results=False, **options)\n", 120 | " 158 # print(f\">>> json_response: {response.json_response}\")\n", 121 | " 159 table = KqlResponse(response, **options).tables[0]\n", 122 | " \n", 123 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\draft_client.py in execute(self, id, query, accept_partial_results, **options)\n", 124 | " 175 \n", 125 | " 176 if self._aad_helper is not None:\n", 126 | "--> 177 request_headers[\"Authorization\"] = self._aad_helper.acquire_token()\n", 127 | " 178 elif self._appkey is not None:\n", 128 | " 179 request_headers[\"x-api-key\"] = self._appkey\n", 129 | " \n", 130 | "~\\azuredatastudio-python\\0.0.1\\lib\\site-packages\\Kqlmagic\\my_aad_helper.py in acquire_token(self)\n", 131 | " 399 except Exception as e:\n", 132 | " 400 kwargs = self._get_authentication_error_kwargs()\n", 133 | "--> 401 raise AuthenticationError(e, **kwargs)\n", 134 | " 402 \n", 135 | " 403 \n", 136 | " \n", 137 | "AuthenticationError: AuthenticationError('azcli_login', 'TypeError('catching classes that do not inherit from BaseException is not allowed',)', '{'authority': 'azcli_login', 'authentication_method': 'azcli_login', 'resource': 'https://api.loganalytics.io'}')\n", 138 | "\n", 139 | "```" 140 | ], 141 | "metadata": { 142 | "azdata_cell_guid": "56c7edfd-ed80-42c8-a55c-23b257e60660" 143 | } 144 | }, 145 | { 146 | "cell_type": "markdown", 147 | "source": [ 148 | "It usually means that azure.common is not installed properly. Check in `azuredatastudio-python/lib/site-packages/azure/common/__init__.py exists`.\n", 149 | "\n", 150 | "If it does exist, then it's best to upgrade azure.common package or reinstall. For mine, I just upgraded because I was using an older version on azure.common (from1.1.23 to 1.1.26)" 151 | ], 152 | "metadata": { 153 | "azdata_cell_guid": "32492fa9-f641-4917-b356-7d2fe3626db1" 154 | } 155 | }, 156 | { 157 | "cell_type": "markdown", 158 | "source": [ 159 | "This is to quickly check that azure.common works or not." 
160 | ], 161 | "metadata": { 162 | "azdata_cell_guid": "9804a025-b53f-45ce-8a2c-20c4a9b3f1f6" 163 | } 164 | }, 165 | { 166 | "cell_type": "code", 167 | "source": [ 168 | "from azure.common.credentials import get_cli_profile" 169 | ], 170 | "metadata": { 171 | "azdata_cell_guid": "251c6071-e565-4132-9713-aaba2262cee1" 172 | }, 173 | "outputs": [], 174 | "execution_count": null 175 | }, 176 | { 177 | "cell_type": "markdown", 178 | "source": [ 179 | "Check my current environment." 180 | ], 181 | "metadata": { 182 | "azdata_cell_guid": "b3392dc0-9aa5-41c3-9180-07306c1d4c47" 183 | } 184 | }, 185 | { 186 | "cell_type": "code", 187 | "source": [ 188 | "import sys\r\n", 189 | "print(sys.path)" 190 | ], 191 | "metadata": { 192 | "azdata_cell_guid": "41fd9a7a-ae59-4bea-a237-9c28c272e2aa" 193 | }, 194 | "outputs": [], 195 | "execution_count": null 196 | }, 197 | { 198 | "cell_type": "markdown", 199 | "source": [ 200 | "May have to reinstall a new azure-common from the ADS Package UI." 201 | ], 202 | "metadata": { 203 | "azdata_cell_guid": "d8bf9097-fed4-44a0-bf48-86d6f1747044" 204 | } 205 | } 206 | ] 207 | } -------------------------------------------------------------------------------- /Simple Demo/Papermill/readme.md: -------------------------------------------------------------------------------- 1 | # Demo script 2 | 3 | ## Parameterized Notebook demo 4 | ### Papermill 5 | > Talking point: To automate Notebooks a **command line interface** to execute them would be handy. Papermill is an open source tool you could use to run Notebooks from a command line interface. **Papermill lets you execute Notebooks and parameterize Notebooks**. 6 | 1. Login using "az login" before the demo and have things ready. This way there will be no login prompts during the demo. 7 | 2. Show KqlmagicParameterizedQuery.ipynb in ADS, and what the Notebook does 8 | 3. Jump to powershell command prompt: 9 | 1. Execute Notebook for Florida using Papermill 10 | - papermill .\KqlmagicParameterizedQuery.ipynb .\StormEventsFlorida.ipynb -p stateFilter "FLORIDA" 11 | 2. Execute Notebook for Washington using Papermill 12 | - papermill .\KqlmagicParameterizedQuery.ipynb .\StormEventsWashington.ipynb -p stateFilter "WASHINGTON" 13 | 3. Open both Notebooks in ADS and show the results 14 | 4. Use split view to have some fun 15 | 16 | > Talking point: This opens up new opportunities for how notebooks can be used. For example: 1) If you have a multi-region service, you might want to run the same **analysis on different regions using parameterization**. If you find any issues, you might want to open incidents on it or mitigate the issue. 2) If you find a particular issue, you might want to run another Notebook to diagnose further. You can now **programmatically build workflows**. 17 | 18 | > Once you capture these important steps into Notebooks, you can execute them yourself manually, automate them and run them in the environment of your choice. **Create once and use everywhere**. 19 | 20 | ### Run with parameters 21 | > Azure Data Studio provides an easy way to **input parameters manually** and re-run the Notebook. 22 | 1. Click on "Run with parameters" button on the top right. 23 | 2. Type in "ILLINOIS"; into the input box and execute. 24 | 25 | > Azure Data Studio creates a **copy of the file** for you so that you do not overwrite the original, and then it is available for you to store as a separate copy. 
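
The same flow can also be driven from Python rather than the command line: Papermill exposes `execute_notebook` as a regular function, so the per-state runs above can be chained into a small driver script and grown into a workflow. Below is a minimal sketch, assuming Papermill and the notebook's own prerequisites (Kqlmagic, an authenticated `az login` session) are already set up, and that `KqlmagicParameterizedQuery.ipynb` sits in the working folder as in the CLI demo above:

```python
# Minimal sketch: run the parameterized notebook once per state via Papermill's Python API.
# Assumes papermill is installed and KqlmagicParameterizedQuery.ipynb is in the working folder.
import papermill as pm

states = ["FLORIDA", "WASHINGTON", "ILLINOIS"]

for state in states:
    output_notebook = f"StormEvents{state.title()}.ipynb"   # e.g. StormEventsFlorida.ipynb
    pm.execute_notebook(
        "KqlmagicParameterizedQuery.ipynb",   # input (parameterized) notebook
        output_notebook,                      # executed copy, with results, per state
        parameters=dict(stateFilter=state),   # same as: -p stateFilter "FLORIDA"
    )
    print(f"Finished {output_notebook}")
```

From here the loop can branch on what a run finds (for example, executing a follow-up diagnostic notebook or filing an incident), which is the "programmatically build workflows" idea in the talking points above.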
-------------------------------------------------------------------------------- /Simple Demo/Parameterization/SQLDBLog/AzureAutomationRunbookTutorial.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "powershell", 5 | "display_name": "PowerShell", 6 | "language": "powershell" 7 | }, 8 | "language_info": { 9 | "name": "powershell", 10 | "codemirror_mode": "shell", 11 | "mimetype": "text/x-sh", 12 | "file_extension": ".ps1" 13 | }, 14 | "extensions": { 15 | "azuredatastudio": { 16 | "version": 1, 17 | "views": [] 18 | } 19 | } 20 | }, 21 | "nbformat_minor": 2, 22 | "nbformat": 4, 23 | "cells": [ 24 | { 25 | "cell_type": "markdown", 26 | "source": [ 27 | "# Automating Azure SQL Diagnostic notebook execution" 28 | ], 29 | "metadata": { 30 | "azdata_cell_guid": "9fb4903e-f618-45b5-b728-4c6706591649", 31 | "extensions": { 32 | "azuredatastudio": { 33 | "views": [] 34 | } 35 | } 36 | }, 37 | "attachments": {} 38 | }, 39 | { 40 | "cell_type": "markdown", 41 | "source": [ 42 | "This notebook outlines how you can use Azure Automation to schedule SQL Notebook execution. \n", 43 | "\n", 44 | "## Setup pre-requisites\n", 45 | "\n", 46 | "1. In Azure Automation, create a Runbook and choose PowerShell as the Runbook type.\n", 47 | "2. Create a service principal as per [Create Azure AD users using service principals - Azure SQL Database | Microsoft Docs](https://docs.microsoft.com/en-us/azure/azure-sql/database/authentication-aad-service-principal-tutorial#create-the-service-principal-user-in-azure-sql-database). The name of the Azure Automation is the user name that you will have to create in the service database. \n", 48 | "3. Azure Automation Runbook is using Managed Identity for execution. More info about Managed Identity is [here](https://docs.microsoft.com/en-us/powershell/module/sqlserver/invoke-sqlcmd?view=sqlserver-ps&source=docs#example-11--connect-to-azure-sql-database--or-managed-instance--using-an-access-token)\n", 49 | "4. Create an Azure Key Vault Secrets to access the blob storage where the output notebook will be stored. Setup Access Policy as per [Assign an Azure Key Vault access policy (Portal) | Microsoft Docs](https://docs.microsoft.com/en-us/azure/key-vault/general/assign-access-policy-portal) to link the Azure Automation account to the blob storage.\n", 50 | "\n", 51 | "The DBDiagnostic notebook is located [here](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/SQL%20Notebooks/DBDiagnostics.ipynb) and the equivalent raw path of this notebook which is fetched from the code below is https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/main/Simple%20Demo/SQL%20Notebooks/DBDiagnostics.ipynb.\n", 52 | "" 53 | ], 54 | "metadata": { 55 | "azdata_cell_guid": "e604eb40-d2fe-4dc6-9d10-c2d10a79b042", 56 | "extensions": { 57 | "azuredatastudio": { 58 | "views": [] 59 | } 60 | } 61 | }, 62 | "attachments": {} 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "source": [ 67 | "## The logic / approach\r\n", 68 | "\r\n", 69 | "Below is the Azure Automation Runbook definition, which takes an input paramter called `$Title`.\r\n", 70 | "\r\n", 71 | "The flow is as follows:\r\n", 72 | "1. parses the parameter `$Title` which contains server name and database name in square braces. Example:\r\n", 73 | "`Slow Query [SQLInstanceName][DatabaseName]`\r\n", 74 | "\r\n", 75 | "2. 
Fetch the notebook to run from https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/main/Simple%20Demo/SQL%20Notebooks/DBDiagnostics.ipynb. \r\n", 76 | "\r\n", 77 | "3. Get access token from Azure Automation to connect to the SQL Instance. \r\n", 78 | "\r\n", 79 | "4. Invoke it against the SQL instance and database as retreved from Step 1, using `Invoke-SqlNotebook` with the access token from Step 3. \r\n", 80 | "\r\n", 81 | "5. Get the SAS Key to access the storage account. \r\n", 82 | "\r\n", 83 | "6. Save the output notebook to Blob Storage using the SAS Key.\r\n", 84 | "\r\n", 85 | "7. Return the path to the output notebook.\r\n", 86 | "\r\n", 87 | "Below code cell implements the approach above. \r\n", 88 | "" 89 | ], 90 | "metadata": { 91 | "azdata_cell_guid": "b3cce568-3111-466f-91f7-ed05f7007105", 92 | "extensions": { 93 | "azuredatastudio": { 94 | "views": [] 95 | } 96 | } 97 | }, 98 | "attachments": {} 99 | }, 100 | { 101 | "cell_type": "code", 102 | "source": [ 103 | "param (\r\n", 104 | " [Parameter(Mandatory=$true)][String]$Title \r\n", 105 | " )\r\n", 106 | " \r\n", 107 | " if($Title -match \"\\[([^\\]]+)\\]\\[([^\\]]+)\\]\")\r\n", 108 | " {\r\n", 109 | " # Server and database\r\n", 110 | " $ServerName = $matches[1]\r\n", 111 | " $DatabaseName = $matches[2]\r\n", 112 | "\r\n", 113 | " Import-Module SQLServer\r\n", 114 | " Import-Module Az.Accounts -MinimumVersion 2.2.0\r\n", 115 | "\r\n", 116 | " # Logging in \r\n", 117 | " $ac = Connect-AzAccount -Identity\r\n", 118 | "\r\n", 119 | " # Set the file name\r\n", 120 | " $inputNB = \"DBDiagnostic.ipynb\"\r\n", 121 | " $outputNB = \"DBDiagnostic_$(get-date -f yyyy-MM-dd).ipynb\"\r\n", 122 | "\r\n", 123 | " # Downloading the notebook that we want to run and store it in inputNB \r\n", 124 | " $sourceNB = \"https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/main/Simple%20Demo/SQL%20Notebooks/DBDiagnostics.ipynb\"\r\n", 125 | " Invoke-WebRequest $sourceNB -OutFile $inputNB\r\n", 126 | "\r\n", 127 | " # Getting access token for this app to connect to Azure SQL \r\n", 128 | " $sqlAccessToken = (Get-AzAccessToken -ResourceUrl https://database.windows.net).Token\r\n", 129 | "\r\n", 130 | " # Now that we have the token, we use it to connect to an Azure SQL DB and run the notebook\r\n", 131 | " # inputNB and save the executed notebook as outputNB \r\n", 132 | " $result = Invoke-SqlNotebook -ServerInstance $ServerName -Database $DatabaseName -AccessToken $sqlAccessToken `\r\n", 133 | " -InputFile $inputNB -OutputFile $outputNB \r\n", 134 | "\r\n", 135 | " # Get the SAS token for Azure Storage \r\n", 136 | " $secret = Get-AzKeyVaultSecret -VaultName \"myAKV\" -Name \"mySASKey\"\r\n", 137 | " $ssPtr = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR($secret.SecretValue)\r\n", 138 | " try {\r\n", 139 | " $storageAccountKey = [System.Runtime.InteropServices.Marshal]::PtrToStringBSTR($ssPtr)\r\n", 140 | " } finally {\r\n", 141 | " [System.Runtime.InteropServices.Marshal]::ZeroFreeBSTR($ssPtr)\r\n", 142 | " }\r\n", 143 | "\r\n", 144 | " # Write to Storage Account\r\n", 145 | " $storageAccount = \"mystorageaccount\" \r\n", 146 | " $blob = \"myblob\"\r\n", 147 | "\r\n", 148 | " # Saving to blob and return the Uri\r\n", 149 | " $result = New-AzStorageContext -StorageAccountName $storageAccount -StorageAccountKey $storageAccountKey `\r\n", 150 | " | Set-AzStorageBlobContent -Container $blob -File $outputNB -Blob $outputNB -Force\r\n", 151 | "\r\n", 152 | " $blobUri = $result.ICloudBlob.Uri.AbsoluteUri\r\n", 153 
| "\r\n", 154 | " # Return the URI of the output notebook\r\n", 155 | " $blobUri\r\n", 156 | " }\r\n", 157 | "\r\n", 158 | "" 159 | ], 160 | "metadata": { 161 | "azdata_cell_guid": "370bd39c-0884-42e7-819c-1d03dedf98eb", 162 | "extensions": { 163 | "azuredatastudio": { 164 | "views": [] 165 | } 166 | } 167 | }, 168 | "outputs": [], 169 | "execution_count": null 170 | } 171 | ] 172 | } -------------------------------------------------------------------------------- /Simple Demo/Parameterization/SQLDBLog/Demo-ExecuteAzureSQLLogAnalytics.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "python3", 5 | "display_name": "Python 3", 6 | "language": "python" 7 | }, 8 | "language_info": { 9 | "name": "python", 10 | "version": "3.8.10", 11 | "mimetype": "text/x-python", 12 | "codemirror_mode": { 13 | "name": "ipython", 14 | "version": 3 15 | }, 16 | "pygments_lexer": "ipython3", 17 | "nbconvert_exporter": "python", 18 | "file_extension": ".py" 19 | } 20 | }, 21 | "nbformat_minor": 2, 22 | "nbformat": 4, 23 | "cells": [ 24 | { 25 | "cell_type": "markdown", 26 | "source": [ 27 | "# Demo: Execute a parameterized notebook\r\n", 28 | "\r\n", 29 | "- Author: Julie Koesmarno ([@MsSQLGirl](http://twitter.com/MsSQLgirl))\r\n", 30 | "- Last updated: Mar 7, 2021\r\n", 31 | "\r\n", 32 | "This notebook provides a few options of executing a **parameterized** notebook.\r\n", 33 | "\r\n", 34 | "- [Option 1: Embedding a link in a text cell](#option-1-embedding-a-link-in-a-text-cell)\r\n", 35 | "- [Option 2: Execute a local parameterized notebook in a code cell](#option-2-execute-a-local-parameterized-notebook-in-a-code-cell)\r\n", 36 | "- [Option 3: Create a dynamic link to parameterized notebook in GitHub](#option-3-create-a-dynamic-link-to-parameterized-notebook-in-github)\r\n", 37 | "\r\n", 38 | "A few notes about the parameterized notebook that we will execute today:\r\n", 39 | "* [AzureSQLLogsAndMetricsWithLogAnalytics.ipynb](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Parameterization/SQLDBLog/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb) analyzes log data of an Azure SQL server.\r\n", 40 | "* If you have an existing Log Analytics workspace, you can use it when running / trying out this parameterized notebook execution demo. If you don't have it, it's OK, we will use a dummy GUID. The notebook should run but it won't provide any log data analysis. \r\n", 41 | "* Please review the Pre-requisites below. \r\n", 42 | "\r\n", 43 | "## Pre-requisites\r\n", 44 | "1. Azure Data Studio v1.26.1 Feb 2021 release. http://aka.ms/getAzureDataStudio.\r\n", 45 | "2. Papermill package installed on your Python environment for Azure Data Studio. \r\n", 46 | "3. Kqlmagic package installed on your Python environment for Azure Data Studio.\r\n", 47 | "4. [azcli](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli) installed as we will be relying on Azure CLI login to connect to Azure Log Analytics workspace in [AzureSQLLogsAndMetricsWithLogAnalytics.ipynb](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Parameterization/SQLDBLog/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb). 
\r\n", 48 | "\r\n", 49 | "\r\n", 50 | "\r\n", 51 | "\r\n", 52 | "\r\n", 53 | "\r\n", 54 | "\r\n", 55 | "\r\n", 56 | "\r\n", 57 | "\r\n", 58 | "\r\n", 59 | "\r\n", 60 | "" 61 | ], 62 | "metadata": { 63 | "azdata_cell_guid": "510bb4d4-e013-4fd4-b894-c3dc15a59dd1" 64 | } 65 | }, 66 | { 67 | "cell_type": "code", 68 | "source": [ 69 | "## This is optional\r\n", 70 | "import sys\r\n", 71 | "!{sys.executable} -m pip install --upgrade pip\r\n", 72 | "!{sys.executable} -m pip install Kqlmagic --no-cache-dir --upgrade\r\n", 73 | "!{sys.executable} -m pip install papermill --no-cache-dir --upgrade" 74 | ], 75 | "metadata": { 76 | "azdata_cell_guid": "ab685df7-08ef-471e-8d7e-d2b61d005f44" 77 | }, 78 | "outputs": [], 79 | "execution_count": null 80 | }, 81 | { 82 | "cell_type": "markdown", 83 | "source": [ 84 | "## Known Issues\r\n", 85 | "If you plan to create your own parameterized notebook like [AzureSQLLogsAndMetricsWithLogAnalytics.ipynb](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Parameterization/SQLDBLog/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb), please note the following known issues.\r\n", 86 | "* [Parameterized notebook errors on a line break after %%kql which is supposed to work](https://github.com/microsoft/azuredatastudio/issues/13305). \r\n", 87 | " > **Workaround**: ensure that there is a space next to `%%kql` to `%%kql ` before a new line break.\r\n", 88 | "" 89 | ], 90 | "metadata": { 91 | "azdata_cell_guid": "a7b77e75-8c8d-43c8-9f8e-be9e19a6bcf5" 92 | } 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "source": [ 97 | "## Option 1: Embedding a link in a text cell\r\n", 98 | "You can also create a link like this to open the parameterized notebook with injected parameter. With this option, you the parameters passed in are static, i.e. built in as part of this Text Cell. \r\n", 99 | "\r\n", 100 | "URI with Injected Parameter cell Test: [azuredatastudio://microsoft.notebook/open?url=https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/main/Simple%20Demo/Parameterization/SQLDBLog/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb?workspaceID='658bf3c6-6099-4167-8084-58aca4529c30'](azuredatastudio://microsoft.notebook/open?url=https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/main/Simple%20Demo/Parameterization/SQLDBLog/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb?workspaceID='658bf3c6-6099-4167-8084-58aca4529c30') \r\n", 101 | "\r\n", 102 | "> **Tip**: I'm using a dummy workspace ID value. While the notebook runs, it might be saying that it cannot find the workspace ID in your environment. So you'll need to update this value both in the text and the underlying link." 103 | ], 104 | "metadata": { 105 | "azdata_cell_guid": "a6342cd2-6e0f-4f71-a315-164fc1e508b9" 106 | } 107 | }, 108 | { 109 | "cell_type": "markdown", 110 | "source": [ 111 | "## Option 2: Execute a local parameterized notebook in a code cell\r\n", 112 | "\r\n", 113 | "> **Tip**: ensure that you have [Pre-requisites](#Pre-requisites) are all satisfied before continuing. \r\n", 114 | "\r\n", 115 | "### What you need to provide for the next two cells to work:\r\n", 116 | "\r\n", 117 | "1. inputNotebook - this is the local path to the parameterized notebook that you want to execute. \r\n", 118 | "In the example below, I'm executing [AzureSQLLogsAndMetricsWithLogAnalytics.ipynb](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/main/Simple%20Demo/Parameterization/SQLDBLog/AzureSQLLogsAndMetricsWithLogAnalytics.ipynb) that is downloaded to my local path. 
\r\n", 119 | "2. outputNotebook - this is the local path to the output of the notebook once executed. \r\n", 120 | "3. parameters to the inputNotebook. \r\n", 121 | " * workspaceID = this is the Workspace ID of the Azure Log Analytics.\r\n", 122 | " \r\n", 123 | "This option is more **interactive / dynamic**. \r\n", 124 | "\r\n", 125 | "\r\n", 126 | "\r\n", 127 | "> **Tip**: Use this random guid `658bf3c6-6099-4167-8084-58aca4529c30` as an example. The notebook (inputNotebook) will still run, but it might not have the input you need. \r\n", 128 | "" 129 | ], 130 | "metadata": { 131 | "azdata_cell_guid": "7f074d17-22a4-40db-bfc5-6e72d4ffc4d7" 132 | } 133 | }, 134 | { 135 | "cell_type": "code", 136 | "source": [ 137 | "## Mask the Workspace ID input, and only print if running in debug mode\r\n", 138 | "\r\n", 139 | "import getpass\r\n", 140 | "inputWorkspaceID = getpass.getpass(\"Enter Log Analytics Workspace ID\")\r\n", 141 | "\r\n", 142 | "isDebug = input (\"debug mode? Y/N\")\r\n", 143 | "\r\n", 144 | "if isDebug.lower() == \"y\":\r\n", 145 | " print(inputWorkspaceID)" 146 | ], 147 | "metadata": { 148 | "azdata_cell_guid": "169d4f01-6908-482e-8d09-facd5d3f2bbc", 149 | "tags": [] 150 | }, 151 | "outputs": [], 152 | "execution_count": 1 153 | }, 154 | { 155 | "cell_type": "markdown", 156 | "source": [ 157 | "Now execute the parameterized notebook:" 158 | ], 159 | "metadata": { 160 | "azdata_cell_guid": "faefbb91-d526-4502-b3b9-0016d4dbdc86" 161 | } 162 | }, 163 | { 164 | "cell_type": "code", 165 | "source": [ 166 | "import papermill as pm\r\n", 167 | "\r\n", 168 | "inputNotebook = './AzureSQLLogsAndMetricsWithLogAnalytics.ipynb'\r\n", 169 | "outputNotebook = 'c:/temp/Output.ipynb'\r\n", 170 | "\r\n", 171 | "silent = pm.execute_notebook(\r\n", 172 | " inputNotebook,\r\n", 173 | " outputNotebook,\r\n", 174 | " parameters = dict(workspaceID = inputWorkspaceID)\r\n", 175 | ")" 176 | ], 177 | "metadata": { 178 | "azdata_cell_guid": "9dff63fd-a8f2-4b1d-936d-02c07e24704d", 179 | "tags": [] 180 | }, 181 | "outputs": [], 182 | "execution_count": null 183 | }, 184 | { 185 | "cell_type": "markdown", 186 | "source": [ 187 | "## Option 3: Create a dynamic link to parameterized notebook in GitHub\r\n", 188 | "\r\n", 189 | "### What you need to provide for the next two cells to work:\r\n", 190 | "WorkspaceID. \r\n", 191 | "\r\n", 192 | "This option is more **interactive / dynamic**. \r\n", 193 | "\r\n", 194 | "> **Tip**: Use this random guid `658bf3c6-6099-4167-8084-58aca4529c30` as an example. The notebook (inputNotebook) will still run, but it might not have the input you need. " 195 | ], 196 | "metadata": { 197 | "azdata_cell_guid": "ea9fa3ef-cd02-46f1-a763-925e46ab2f1d" 198 | } 199 | }, 200 | { 201 | "cell_type": "code", 202 | "source": [ 203 | "## Mask the Workspace ID input, and only print if running in debug mode\r\n", 204 | "\r\n", 205 | "import getpass\r\n", 206 | "\r\n", 207 | "if \"inputWorkspaceID\" not in locals():\r\n", 208 | " inputWorkspaceIDForURL = getpass.getpass(\"Enter Log Analytics Workspace ID\")\r\n", 209 | "else: \r\n", 210 | " inputWorkspaceIDForURL = inputWorkspaceID\r\n", 211 | "\r\n", 212 | "isDebug = input (\"debug mode? 
Y/N\")\r\n", 213 | "\r\n", 214 | "if isDebug.lower() == \"y\":\r\n", 215 | " print(inputWorkspaceIDForURL)" 216 | ], 217 | "metadata": { 218 | "azdata_cell_guid": "19b7ea62-2407-4339-8c87-93b323360cb3" 219 | }, 220 | "outputs": [ 221 | { 222 | "output_type": "stream", 223 | "name": "stdout", 224 | "text": "658bf3c6-6099-4167-8084-58aca4529c30\n" 225 | } 226 | ], 227 | "execution_count": 4 228 | }, 229 | { 230 | "cell_type": "code", 231 | "source": [ 232 | "import re, os\r\n", 233 | "from IPython.display import *\r\n", 234 | "display(HTML(\"

Click here to open parameterized notebook and press Run all

\"))\r\n", 235 | "" 236 | ], 237 | "metadata": { 238 | "azdata_cell_guid": "922cdc47-ee3e-4c40-91b4-06dc48a0d258" 239 | }, 240 | "outputs": [ 241 | { 242 | "output_type": "display_data", 243 | "data": { 244 | "text/plain": "", 245 | "text/html": "

Click here to open parameterized notebook and press Run all

" 246 | }, 247 | "metadata": {} 248 | } 249 | ], 250 | "execution_count": 6 251 | } 252 | ] 253 | } -------------------------------------------------------------------------------- /Simple Demo/Parameterization/readme.md: -------------------------------------------------------------------------------- 1 | # Paramterization examples -------------------------------------------------------------------------------- /Simple Demo/PowerShell Notebooks/SimpleAdditionInPowerShell.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "powershell", 5 | "display_name": "PowerShell", 6 | "language": "powershell" 7 | }, 8 | "language_info": { 9 | "name": "powershell", 10 | "codemirror_mode": "shell", 11 | "mimetype": "text/x-sh", 12 | "file_extension": ".ps1" 13 | } 14 | }, 15 | "nbformat_minor": 2, 16 | "nbformat": 4, 17 | "cells": [ 18 | { 19 | "cell_type": "code", 20 | "source": [ 21 | "$a = 10\r\n", 22 | "$b = 5" 23 | ], 24 | "metadata": { 25 | "azdata_cell_guid": "3726ff67-9b0c-4955-a478-f84b5a8a41c9", 26 | "tags": [ 27 | "parameters" 28 | ] 29 | }, 30 | "outputs": [], 31 | "execution_count": null 32 | }, 33 | { 34 | "cell_type": "code", 35 | "source": [ 36 | "$a + $b" 37 | ], 38 | "metadata": { 39 | "azdata_cell_guid": "23ee68b9-02e2-4c14-893b-a29c2a27e7cf" 40 | }, 41 | "outputs": [], 42 | "execution_count": null 43 | } 44 | ] 45 | } -------------------------------------------------------------------------------- /Simple Demo/SQL Notebooks/Blursday.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "Happy \"What ever day of the week is this\" Day 🤩\r\n", 20 | "" 21 | ], 22 | "metadata": { 23 | "azdata_cell_guid": "2ae68e7b-be77-4aef-8bae-a90a26792f7a" 24 | } 25 | }, 26 | { 27 | "cell_type": "code", 28 | "source": [ 29 | "SELECT DATEDIFF(DAY, '20200303', GETDATE()) AS DayNumber" 30 | ], 31 | "metadata": { 32 | "azdata_cell_guid": "3ff47c9e-6a53-4928-acc5-0dd388f9fa35" 33 | }, 34 | "outputs": [ 35 | { 36 | "output_type": "display_data", 37 | "data": { 38 | "text/html": "(1 row affected)" 39 | }, 40 | "metadata": {} 41 | }, 42 | { 43 | "output_type": "display_data", 44 | "data": { 45 | "text/html": "Total execution time: 00:00:00.052" 46 | }, 47 | "metadata": {} 48 | }, 49 | { 50 | "output_type": "execute_result", 51 | "metadata": {}, 52 | "execution_count": 1, 53 | "data": { 54 | "application/vnd.dataresource+json": { 55 | "schema": { 56 | "fields": [ 57 | { 58 | "name": "DayNumber" 59 | } 60 | ] 61 | }, 62 | "data": [ 63 | { 64 | "0": "513" 65 | } 66 | ] 67 | }, 68 | "text/html": [ 69 | "", 70 | "", 71 | "", 72 | "
DayNumber
513
" 73 | ] 74 | } 75 | } 76 | ], 77 | "execution_count": 1 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "source": [ 82 | "\r\n", 83 | "![](https://i.pinimg.com/originals/0d/d9/fe/0dd9fec410bd390145ccb28e9b00f029.gif)" 84 | ], 85 | "metadata": { 86 | "azdata_cell_guid": "c0d22195-7b3e-4b11-9aa2-59e7d7359f84" 87 | } 88 | } 89 | ] 90 | } -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/20200513 - Notebooks 101 for SQL People.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/20200513 - Notebooks 101 for SQL People.pptx -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/NativeKusto-AggregatesInKusto.kql: -------------------------------------------------------------------------------- 1 | // Simple examples ideal for Charting and SandDance 2 | 3 | // Simple example to get 10 states by count 4 | StormEvents 5 | | summarize Count = count() by State 6 | | sort by Count 7 | | limit 10 8 | 9 | // Simple example to do smart time binning 10 | StormEvents 11 | | summarize EventCount =count() by bin(StartTime, 7d), State 12 | 13 | // Simple example to do smart time binning for three states only 14 | StormEvents 15 | | where State in ("TEXAS", "WASHINGTON", "CALIFORNIA") 16 | | summarize EventCount =count() by bin(StartTime, 14d), State 17 | -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/NativeKusto-MLKustoNotebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "kusto", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Machine Learning in Azure Data Explorer\n", 20 | "\n", 21 | "These are examples of analytics in Azure Data Explorer using machine learning plugins. \n", 22 | "\n", 23 | "## autocluster plugin\n", 24 | "\n", 25 | "`autocluster` finds common patterns of discrete attributes (dimensions) in the data. It then reduces the results of the original query, whether it's 100 or 100k rows, to a small number of patterns. The plugin was developed to help analyze failures (such as exceptions or crashes) but can potentially work on any filtered data set.\n", 26 | "\n", 27 | "More info: https://docs.microsoft.com/en-us/azure/data-explorer/kusto/query/autoclusterplugin" 28 | ], 29 | "metadata": { 30 | "azdata_cell_guid": "b2b4d405-bd06-470a-a296-b02cbd9ce757" 31 | } 32 | }, 33 | { 34 | "cell_type": "code", 35 | "source": [ 36 | "StormEvents \n", 37 | "| where monthofyear(StartTime) == 5\n", 38 | "| extend Damage = iff(DamageCrops + DamageProperty > 0 , \"YES\" , \"NO\")\n", 39 | "| project State , EventType , Damage\n", 40 | "| evaluate autocluster(0.6, \"~\", \"~\", \"*\")" 41 | ], 42 | "metadata": { 43 | "azdata_cell_guid": "77bb97a3-5da9-4b26-922a-1ca41cd4b844", 44 | "tags": [] 45 | }, 46 | "outputs": [ 47 | { 48 | "output_type": "display_data", 49 | "data": { 50 | "text/html": "Commands completed successfully." 
51 | }, 52 | "metadata": {} 53 | }, 54 | { 55 | "output_type": "display_data", 56 | "data": { 57 | "text/html": "Total execution time: 00:00:00.118" 58 | }, 59 | "metadata": {} 60 | }, 61 | { 62 | "output_type": "execute_result", 63 | "metadata": {}, 64 | "execution_count": 1, 65 | "data": { 66 | "application/vnd.dataresource+json": { 67 | "schema": { 68 | "fields": [ 69 | { 70 | "name": "SegmentId" 71 | }, 72 | { 73 | "name": "Count" 74 | }, 75 | { 76 | "name": "Percent" 77 | }, 78 | { 79 | "name": "State" 80 | }, 81 | { 82 | "name": "EventType" 83 | }, 84 | { 85 | "name": "Damage" 86 | } 87 | ] 88 | }, 89 | "data": [ 90 | { 91 | "0": "0", 92 | "1": "2278", 93 | "2": "38.71515975526852", 94 | "3": "*", 95 | "4": "Hail", 96 | "5": "NO" 97 | }, 98 | { 99 | "0": "1", 100 | "1": "512", 101 | "2": "8.701563562202583", 102 | "3": "*", 103 | "4": "Thunderstorm Wind", 104 | "5": "YES" 105 | }, 106 | { 107 | "0": "2", 108 | "1": "898", 109 | "2": "15.261726716519375", 110 | "3": "TEXAS", 111 | "4": "*", 112 | "5": "*" 113 | } 114 | ] 115 | }, 116 | "text/html": [ 117 | "", 118 | "", 119 | "", 120 | "", 121 | "", 122 | "
SegmentIdCountPercentStateEventTypeDamage
0227838.71515975526852*HailNO
15128.701563562202583*Thunderstorm WindYES
289815.261726716519375TEXAS**
" 123 | ] 124 | } 125 | } 126 | ], 127 | "execution_count": 1 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "source": [ 132 | "## diffpatterns plugin\n", 133 | "\n", 134 | "Compares two data sets of the same structure and finds patterns of discrete attributes (dimensions) that characterize differences between the two data sets. Diffpatterns was developed to help analyze failures (for example, by comparing failures to non-failures in a given time frame), but can potentially find differences between any two data sets of the same structure.\n", 135 | "\n", 136 | "More info: https://docs.microsoft.com/en-us/azure/data-explorer/kusto/query/diffpatternsplugin" 137 | ], 138 | "metadata": { 139 | "azdata_cell_guid": "a9393713-3701-4cf6-8425-ff2d5ddf9f1b" 140 | } 141 | }, 142 | { 143 | "cell_type": "code", 144 | "source": [ 145 | "StormEvents \n", 146 | "| where monthofyear(StartTime) == 5\n", 147 | "| extend Damage = iff(DamageCrops + DamageProperty > 0 , 1 , 0)\n", 148 | "| project State , EventType , Source , Damage, DamageCrops\n", 149 | "| evaluate diffpatterns(Damage, \"0\", \"1\" )" 150 | ], 151 | "metadata": { 152 | "azdata_cell_guid": "5afe08f3-71dd-4caa-82ea-105e691ae92f", 153 | "tags": [] 154 | }, 155 | "outputs": [], 156 | "execution_count": null 157 | } 158 | ] 159 | } -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/PythonNotebook.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/PythonNotebook.gif -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/SampleSQLNotebook-ExecSP.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Create a procedure and calling a procedure in SQL Notebook\r\n", 20 | "\r\n", 21 | "_Last updated: 2020.10.16\\\r\n", 22 | "Author: Julie Koesmarno ([@MsSQLGirl](http://twitter.com/MsSQLGirl))_\r\n", 23 | "\r\n", 24 | "This notebook will create a stored procedure in your designated database. After the creation, you can execute the stored procedure.\r\n", 25 | "\r\n", 26 | "At the end of this notebook, there is a clean up script that will remove the stored procedure." 
27 | ], 28 | "metadata": { 29 | "azdata_cell_guid": "3b2d78b9-4f99-48e7-9c53-f12482ed777c" 30 | } 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "source": [ 35 | "## Change a database (optional)" 36 | ], 37 | "metadata": { 38 | "azdata_cell_guid": "57c99ddb-eb82-40a7-8164-e304f03ec228" 39 | } 40 | }, 41 | { 42 | "cell_type": "code", 43 | "source": [ 44 | "USE Keep_CalmAndCarryOn\r\n", 45 | "GO\r\n", 46 | "" 47 | ], 48 | "metadata": { 49 | "azdata_cell_guid": "d5e035b0-2f57-42c0-aae6-90ca57928703" 50 | }, 51 | "outputs": [], 52 | "execution_count": null 53 | }, 54 | { 55 | "cell_type": "markdown", 56 | "source": [ 57 | "## Create a stored procedure\r\n", 58 | "This stored procedure will add a number of days (`@DayNumber`) to a specified date (`@Date`), and return a result set with one row containing the new date.\r\n", 59 | "" 60 | ], 61 | "metadata": { 62 | "azdata_cell_guid": "f512f59f-d4ff-42b1-9e5f-bfd6f553d7ac" 63 | } 64 | }, 65 | { 66 | "cell_type": "code", 67 | "source": [ 68 | "CREATE PROCEDURE dbo.usp_AddDayToADate\r\n", 69 | "(\r\n", 70 | " @DayNumber INT = 1,\r\n", 71 | " @Date DATE = NULL\r\n", 72 | ") AS\r\n", 73 | "BEGIN\r\n", 74 | " SELECT DATEADD(DAY, @DayNumber, ISNULL(@Date, GETDATE())) AS NewDate;\r\n", 75 | "END;" 76 | ], 77 | "metadata": { 78 | "azdata_cell_guid": "21c00303-9f8b-44bd-8f03-1f1b95068545" 79 | }, 80 | "outputs": [ 81 | { 82 | "output_type": "display_data", 83 | "data": { 84 | "text/html": "Commands completed successfully." 85 | }, 86 | "metadata": {} 87 | }, 88 | { 89 | "output_type": "display_data", 90 | "data": { 91 | "text/html": "Total execution time: 00:00:00.025" 92 | }, 93 | "metadata": {} 94 | } 95 | ], 96 | "execution_count": 4 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "source": [ 101 | "# Execute the stored procedure\r\n", 102 | "\r\n", 103 | "Because the stored procedure contains default parameter values, you can run the stored procedure without the input parameters.\r\n", 104 | "\r\n", 105 | "This will return tomorrow's date (as at the execution time)" 106 | ], 107 | "metadata": { 108 | "azdata_cell_guid": "80cc4b4b-fe94-4a42-8a76-ebf8c7719051" 109 | } 110 | }, 111 | { 112 | "cell_type": "code", 113 | "source": [ 114 | "EXEC dbo.usp_AddDayToADate " 115 | ], 116 | "metadata": { 117 | "azdata_cell_guid": "9320f891-bd9a-42c1-bf80-cf2e4d67cba8", 118 | "language": "markdown" 119 | }, 120 | "outputs": [ 121 | { 122 | "output_type": "display_data", 123 | "data": { 124 | "text/html": "(1 row affected)" 125 | }, 126 | "metadata": {} 127 | }, 128 | { 129 | "output_type": "display_data", 130 | "data": { 131 | "text/html": "Total execution time: 00:00:00.036" 132 | }, 133 | "metadata": {} 134 | }, 135 | { 136 | "output_type": "execute_result", 137 | "execution_count": 6, 138 | "data": { 139 | "application/vnd.dataresource+json": { 140 | "schema": { 141 | "fields": [ 142 | { 143 | "name": "NewDate" 144 | } 145 | ] 146 | }, 147 | "data": [ 148 | { 149 | "0": "2020-10-19" 150 | } 151 | ] 152 | }, 153 | "text/html": "
NewDate
2020-10-19
" 154 | }, 155 | "metadata": {} 156 | } 157 | ], 158 | "execution_count": 6 159 | }, 160 | { 161 | "cell_type": "markdown", 162 | "source": [ 163 | "## Execute the stored procedure with input parameters\r\n", 164 | "Add 7 days to Jan 1, 2020." 165 | ], 166 | "metadata": { 167 | "azdata_cell_guid": "f9f6c9ad-089c-41e0-b849-d24e6349ee3a" 168 | } 169 | }, 170 | { 171 | "cell_type": "code", 172 | "source": [ 173 | "DECLARE @MyDayNumber INT = 7;\r\n", 174 | "DECLARE @MyDate DATE = '2020-01-01';\r\n", 175 | "\r\n", 176 | "EXEC dbo.usp_AddDayToADate @MyDayNumber, @MyDate;" 177 | ], 178 | "metadata": { 179 | "azdata_cell_guid": "8fdfe64b-9953-4638-8d2e-46775c2dc6a1" 180 | }, 181 | "outputs": [ 182 | { 183 | "output_type": "display_data", 184 | "data": { 185 | "text/html": "(1 row affected)" 186 | }, 187 | "metadata": {} 188 | }, 189 | { 190 | "output_type": "display_data", 191 | "data": { 192 | "text/html": "Total execution time: 00:00:00.036" 193 | }, 194 | "metadata": {} 195 | }, 196 | { 197 | "output_type": "execute_result", 198 | "execution_count": 7, 199 | "data": { 200 | "application/vnd.dataresource+json": { 201 | "schema": { 202 | "fields": [ 203 | { 204 | "name": "NewDate" 205 | } 206 | ] 207 | }, 208 | "data": [ 209 | { 210 | "0": "2020-01-08" 211 | } 212 | ] 213 | }, 214 | "text/html": "
NewDate
2020-01-08
" 215 | }, 216 | "metadata": {} 217 | } 218 | ], 219 | "execution_count": 7 220 | }, 221 | { 222 | "cell_type": "markdown", 223 | "source": [ 224 | "## Clean up\r\n", 225 | "\r\n", 226 | "To clean up, uncomment below and run it." 227 | ], 228 | "metadata": { 229 | "azdata_cell_guid": "68eb9c25-e2eb-4cdf-94dc-bfbd283f16a5" 230 | } 231 | }, 232 | { 233 | "cell_type": "code", 234 | "source": [ 235 | "-- DROP PROCEDURE dbo.usp_AddDayToADate;" 236 | ], 237 | "metadata": { 238 | "azdata_cell_guid": "a8882927-b8de-4d57-a66d-f3d0bdd4e81d" 239 | }, 240 | "outputs": [ 241 | { 242 | "output_type": "display_data", 243 | "data": { 244 | "text/html": "Commands completed successfully." 245 | }, 246 | "metadata": {} 247 | }, 248 | { 249 | "output_type": "display_data", 250 | "data": { 251 | "text/html": "Total execution time: 00:00:00.033" 252 | }, 253 | "metadata": {} 254 | } 255 | ], 256 | "execution_count": 8 257 | } 258 | ] 259 | } -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/ADSHeartNotebooks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/ADSHeartNotebooks.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/AzureDataStudioLogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/AzureDataStudioLogo.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/DBA.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/DBA.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/DataAnalyst.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/DataAnalyst.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/DataEngineer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/DataEngineer.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/DataScientist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/DataScientist.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/DatabaseDeveloper.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/DatabaseDeveloper.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/SQLNotebookExample3.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/SQLNotebookExample3.gif -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/images/markdown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/f92b6bf567ab11a5e589640ca1ce0ec0f255d117/Simple Demo/Sample Notebooks - Data Analysis/images/markdown.png -------------------------------------------------------------------------------- /Simple Demo/Sample Notebooks - Data Analysis/readme.md: -------------------------------------------------------------------------------- 1 | # Notebooks 101 in Azure Data Studio 2 | 3 | Prepared by: Julie Koesmarno (twitter: [@MsSQLGirl](http://twitter.com/MsSQLGirl)) 4 | 5 | [![Azure Data Studio](./images/ADSHeartNotebooks.png)](./images/ADSHeartNotebooks.png "ADS loves Notebooks") 6 | 7 | **http://aka.ms/AzureDataStudio** to learn more about Azure Data Studio and to download it. 8 | 9 | 10 | ## Table of Contents 11 | 12 | 1. [What is a notebook? And why?](#what-is-a-notebook-and-why) 13 | 2. [Fun facts about Notebooks in Azure Data Studio](#fun-facts-about-notebooks-in-azure-data-studio) 14 | 3. [More notebook users](#more-notebook-users) 15 | 4. [Examples](#examples) 16 | 5. [Anatomy](#anatomy) 17 | 6. [Markdown cheatsheet](#markdown-cheatsheet) 18 | 7. [Linking to other parts of notebooks](#linking-to-other-parts-of-notebooks) 19 | 8. [Demo](#demo) 20 | 9. [Notebook use cases](#notebook-use-cases) 21 | 1. [Exploratory / Academic / Collaborative - *original intention*](#exploratory--academic--collaborative---original-intention) 22 | 2. [Broader analysis - *app operations*](#broader-analysis---app-operations) 23 | 3. [DBA Scenarios / examples - *data, automation and DevOps use cases*](#dba-scenarios--examples---data-automation-and-devops-use-cases) 24 | 10. [Useful links for getting started with Notebooks in Azure Data Studio!](#useful-links-for-getting-started-with-notebooks-in-azure-data-studio) 25 | 26 | 27 | 28 | ## What is a notebook? And why? 29 | A *Jupyter* notebook is a tool for interactively developing and presenting Data Science projects ([source](https://towardsdatascience.com/a-beginners-tutorial-to-jupyter-notebooks-1b2f8705888a)). Project *Jupyter* is open source. 30 | 31 | More use cases have landed since then! Most importantly because: 32 | * **documentation** + **code** + **results** + **visualization** can now be in **one place**, and 33 | * the **reproducible framework** around notebooks is beneficial beyond data science. 34 | 35 | More open source contributors developing all sorts of wonderful things that support the operational / workflow side of this! 36 | 37 | **SQL People** can benefit the reproducible data exploratory aspect of notebooks today using Azure Data Studio! 
38 | 39 | 40 | 41 | ## Fun facts about Notebooks in Azure Data Studio: 42 | * Sep 2018: Notebook first introduced in Azure Data Studio 43 | * Mar 2019: SQL Notebooks 44 | * Nov 2019: Jupyter Book & PowerShell Notebooks 45 | * Apr 2020: Kqlmagic for Kusto queries in Python notebook 46 | 47 | ## Meet Notebook users 48 | 49 | | Data Scientist | Data Analyst | Data Engineer | DBA | 50 | | --- | --- | --- | --- | 51 | | [![Azure Data Studio](./images/DataScientist.png)](./images/DataScientist.png "Data Scientist") | [![Azure Data Studio](./images/DataAnalyst.png)](./images/DataAnalyst.png "Data Analyst") | [![Azure Data Studio](./images/DataEngineer.png)](./images/DataEngineer.png "Data Engineer") | [![Azure Data Studio](./images/DBA.png)](./images/DBA.png "DBA") | 52 | | **Typical kernels:** | | | | 53 | | Python, R, SQL | SQL, Python, R | Spark, Python, SQL | SQL, PowerShell | 54 | 55 |
56 | 57 | 58 | ## Examples 59 | Simple Reproducible Research example - [click here](azuredatastudio://microsoft.notebook/open?url=https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/master/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/ReproducibleResearch.ipynb) to display it in Azure Data Studio. 60 | 61 | 62 | 63 | ## Anatomy 64 | * Text Cell - Plain text, markdown, HTML, code snippets and more! 65 | * Plain Text **and** *simple* markdown ***formatting*** 66 | * HTML support for advanced ~~complex~~ formatting 67 | * Code format 68 | ~~~SQL 69 | SELECT TOP 10 * FROM [sys].[databases] 70 | ~~~ 71 | * Multimedia / Image as shown above. 72 | 73 | 74 | * Execution Cell - Code and output 75 | * Visualizations / graphics 76 | 77 | > **tip**: \ 78 | > If you are sharing Notebooks to others publicly, ensure that the results don't include sensitive information. 79 | 80 | 81 | 82 | ## Markdown Cheatsheet 83 | ![](./images/markdown.png) 84 | 85 | 86 | ## Linking to other parts of notebooks 87 | 88 | Github Flavored Markdown (GFM) which is what Azure Data Studio adopts, extends the standard Markdown standard so that all Markdown-rendered headers automatically get IDs, which can be linked to, except in comments. Below is a useful description from the [Gitlab](https://docs.gitlab.com/ee/user/markdown.html#header-ids-and-links) page. 89 | 90 | On hover, a link to those IDs becomes visible to make it easier to copy the link to the header to use it somewhere else. 91 | 92 | The IDs are generated from the content of the header according to the following rules: 93 | 94 | 1. All text is converted to lowercase. 95 | 2. All non-word text (such as punctuation or HTML) is removed. 96 | 3. All spaces are converted to hyphens. 97 | 4. Two or more hyphens in a row are converted to one. 98 | 5. If a header with the same ID has already been generated, a unique incrementing number is appended, starting at 1. 99 | 100 | Example: 101 | 102 | ``` 103 | # This header has spaces in it 104 | ## This header has a :thumbsup: in it 105 | # This header has Unicode in it: 한글 106 | ## This header has spaces in it 107 | ### This header has spaces in it 108 | ## This header has 3.5 in it (and parentheses) 109 | 110 | ``` 111 | 112 | Would generate the following link IDs: 113 | 114 | 1. `this-header-has-spaces-in-it` 115 | 2. `this-header-has-a-in-it` 116 | 3. `this-header-has-unicode-in-it-한글` 117 | 4. `this-header-has-spaces-in-it-1` 118 | 5. `this-header-has-spaces-in-it-2` 119 | 6. `this-header-has-3-5-in-it-and-parentheses` 120 | 121 | Note that the emoji processing happens before the header IDs are generated, so the emoji is converted to an image which is then removed from the ID. 122 | 123 | ### Cross Referencing examples 124 | This includes linking to Headers in this notebook, generic parts within this notebook (with `` reference) and other notebook's parts. 125 | 126 | Here's an example of linking to other parts of this notebook: 127 | 1. [Heading: Demo](#demo), which is located immediately below this text cell. 128 | 2. [Heading: Notebooks Use Cases](#notebooks-use-cases), which is located below the Demo text cell. 129 | 3. [Anchor with ID: The Ops Side](#the-ops-side), which is located as part of the Notebook Use Cases text cell. 130 | 4. [Heading: Markdown](#markdown-cheatsheet), which is located above this text cell. 131 | 5. [Top of this notebook](#notebooks-101-in-azure-data-studio), which is located on top of this notebook. 
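If you ever need to generate these anchor IDs programmatically (for example, when building an index notebook with PowerShell), the rules above can be approximated in a few lines. This is only a rough sketch -- it skips the emoji conversion and the `-1`/`-2` de-duplication steps, so treat it as a starting point rather than the exact renderer behaviour:

```powershell
function Get-MarkdownHeaderId {
    param([Parameter(Mandatory)][string]$HeaderText)

    $id = $HeaderText.ToLowerInvariant()
    $id = $id -replace '[^\w\s-]', '-'   # non-word text (punctuation, HTML) becomes a hyphen
    $id = $id -replace '\s+', '-'        # spaces become hyphens
    $id = $id -replace '-{2,}', '-'      # two or more hyphens collapse into one
    $id.Trim('-')                        # drop any leading/trailing hyphens
}

Get-MarkdownHeaderId 'This header has 3.5 in it (and parentheses)'
# this-header-has-3-5-in-it-and-parentheses
```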
132 | 133 | Here's an example of linking to other part of another notebook ([SimpleSQLNotebook.ipynb](../SimpleSQLNotebook.ipynb)): 134 | 1. [Heading: Get a list of databases](../SimpleSQLNotebook.ipynb#get-a-list-of-databases) 135 | 1. [Anchor with ID: SELECT statement in a text cell](../SimpleSQLNotebook.ipynb#code-format) 136 | 1. [Another option for launching Azure Data Studio to open SimpleSQLNotebook.ipynb copy on GitHub](azuredatastudio://microsoft.notebook/open?url=https://raw.githubusercontent.com/MsSQLGirl/jubilant-data-wizards/main/Simple%20Demo/SimpleSQLNotebook.ipynb#get-a-list-of-databases) 137 | 138 | 139 | 140 | ## Demo 141 | 1. Navigating the controls in Azure Data Studio. \ 142 | Anatomy, Toolbar, Markdown Toolbar, Notebook Viewlet. 143 | 2. Creating a SQL notebook \ 144 | Text, Code, Results, Charting 145 | 3. Creating a Powershell notebook 146 | 4. Creating a book \ 147 | Create a book and searching 148 | 5. Container deployment notebook 149 | 150 | 151 | 152 | ## Notebooks Use Cases 153 | 154 | Notebook journey starts with **Exploratory / Academic / Collaborative / Reproducible** nature, then it's used for **broader analysis for app operations**. We are now seeing more adoption of Notebook in **DBA and data work (management / analysis / visualization)**. 155 | 156 | ### Exploratory / Academic / Collaborative - *original intention* 157 | * [Simple Reproducible Research example on Github](https://github.com/MsSQLGirl/jubilant-data-wizards/blob/master/Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/ReproducibleResearch.ipynb) 158 | * [Typical Data Science example on GitHub](https://github.com/rhiever/Data-Analysis-and-Machine-Learning-Projects/blob/master/example-data-science-notebook/Example%20Machine%20Learning%20Notebook.ipynb) | [View this in Azure Data Studio](azuredatastudio://microsoft.notebook/open?url=https://raw.githubusercontent.com/rhiever/Data-Analysis-and-Machine-Learning-Projects/master/example-data-science-notebook/Example%20Machine%20Learning%20Notebook.ipynb) 159 | * [NBA Games in Notebooks](https://github.com/DunderData/Tutorials/blob/master/Data%20Analysis%20and%20Visualization%20Tutorials/Matplotlib%20Tutorials/NBA%20Full%20Game%20Animation.ipynb) 160 | 161 | ### Broader analysis - *app operations* 162 | * [Malicious detection](https://hub.gke.mybinder.org/user/johnlatwc-shared-psdjmbbn/notebooks/notebooks/Malware%20Decode%20Demo.ipynb) 163 | * [Malware Powershell shellcode analysis by @JohnLaTwC](https://github.com/JohnLaTwC/Shared/blob/master/notebooks/Malware%20PowerShell%20shellcode%20analysis.ipynb) | [View this in Azure Data Studio](azuredatastudio://microsoft.notebook/open?url=https://raw.githubusercontent.com/JohnLaTwC/Shared/master/notebooks/Malware%20PowerShell%20shellcode%20analysis.ipynb) 164 | 165 | 166 | ### DBA Scenarios / examples - *data, automation and DevOps use cases* 167 | * Runbooks: [SQL Assessment](https://github.com/microsoft/sql-server-samples/blob/master/samples/manage/sql-assessment-api/notebooks/SQLAssessmentAPIQuickStartNotebook.ipynb) 168 | * Troubleshooting: 169 | * Jupyter Book created based on Glenn Berry's Diagnostic Queries - check out his [blog](https://glennsqlperformance.com/resources/) or from [dbatools GitHub](https://github.com/sqlcollaborative/dbatools/tree/development/bin/diagnosticquery). 
170 | * Here's an example for [SQL Server 2019 Diagnostic Query in SQL](https://www.dropbox.com/s/k1vauzxxhyh1fnb/SQL%20Server%202019%20Diagnostic%20Information%20Queries.sql?dl=0) 171 | * **SQL Server 2019 Guide Jupyter Book** -> From Command Palette (Ctrl + Shift + P), type "Jupyter Books: SQL Server 2019 Guide". 172 | * Your own! 173 | * Notebooks for **deploying SQL Server containers** -> From Command Palette (Ctrl + Shift + P), type "Deployment: New Deployment..." 174 | * Change Management workflow 175 | 176 | > The "Ops" side of DevOps 177 | > * Runbook automation 178 | > * Ops Excellence ~ Software Engineering Excellence (version control, code review and more!) 179 | > * Incident Response Auditing 180 | 181 | ## Useful links for getting started with Notebooks in Azure Data Studio! 182 | 183 | * Azure Data Studio Notebook Overview https://docs.microsoft.com/sql/big-data-cluster/notebooks-guidance 184 | * Rob Sewell - Incident Response Index https://sqldbawithabeard.com/2019/11/21/dynamically-creating-azure-data-studio-notebooks-with-powershell-for-an-incident-response-index-notebook/ 185 | * Emanuele Meazzo - SQL Diagnostic Jupyter Book https://tsql.tech/the-sql-diagnostic-jupyter-book/ 186 | * Azure Data Studio Release Notes: https://docs.microsoft.com/sql/azure-data-studio/release-notes-azure-data-studio 187 | * Simplify DevOps with Jupyter Notebook: https://dev.to/amit1rrr/simplify-devops-with-jupyter-notebook-e33 188 | * Useful samples: 189 | * SQL Assessment API on GitHub: https://github.com/microsoft/sql-server-samples/tree/master/samples/manage/sql-assessment-api/notebooks 190 | 191 | -------------------------------------------------------------------------------- /Simple Demo/SimplePythonNotebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "python3", 5 | "display_name": "Python 3", 6 | "language": "python" 7 | }, 8 | "language_info": { 9 | "name": "python", 10 | "version": "3.6.6", 11 | "mimetype": "text/x-python", 12 | "codemirror_mode": { 13 | "name": "ipython", 14 | "version": 3 15 | }, 16 | "pygments_lexer": "ipython3", 17 | "nbconvert_exporter": "python", 18 | "file_extension": ".py" 19 | } 20 | }, 21 | "nbformat_minor": 2, 22 | "nbformat": 4, 23 | "cells": [ 24 | { 25 | "cell_type": "code", 26 | "source": [ 27 | "a = 10\r\n", 28 | "b = 5" 29 | ], 30 | "metadata": { 31 | "azdata_cell_guid": "3726ff67-9b0c-4955-a478-f84b5a8a41c9", 32 | "tags": [ 33 | "parameters" 34 | ] 35 | }, 36 | "outputs": [], 37 | "execution_count": 1 38 | }, 39 | { 40 | "cell_type": "code", 41 | "source": [ 42 | "a + b" 43 | ], 44 | "metadata": { 45 | "azdata_cell_guid": "23ee68b9-02e2-4c14-893b-a29c2a27e7cf" 46 | }, 47 | "outputs": [ 48 | { 49 | "data": { 50 | "text/plain": "15" 51 | }, 52 | "metadata": {}, 53 | "execution_count": 2, 54 | "output_type": "execute_result" 55 | } 56 | ], 57 | "execution_count": 2 58 | } 59 | ] 60 | } -------------------------------------------------------------------------------- /Simple Demo/Troubleshooting/Deadlock/Deadlock-1.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Simulating a deadlock in Azure SQL 
database\r\n", 20 | "\r\n", 21 | "Change Kernel to \"SQL\" and attach to an Azure SQL database. \r\n", 22 | "\r\n", 23 | "> **Note:** You'll need [Deadlock-2.ipynb](./Deadlock-2.ipynb) as part of this deadlock simluation.\r\n", 24 | "\r\n", 25 | "There are three steps:\r\n", 26 | "1. [Create tables used for deadlock simulation](#step-1-create-tables-used-for-deadlock-simulation)\r\n", 27 | "2. [Start deadlock simulation](#step-2-start-deadlock-simulation)\r\n", 28 | "3. In parallel, quickly start the code cell in [Deadlock-2.ipynb](./Deadlock-2.ipynb#step-3-run-this-quickly-after-step-2-is-done)\r\n", 29 | "4. [Clean up the tables](#step-4-clean-up-tables)\r\n", 30 | "\r\n", 31 | "\r\n", 32 | "" 33 | ], 34 | "metadata": { 35 | "azdata_cell_guid": "ec35dcc5-444e-4f5e-85b7-ced9a3e4e3f5" 36 | } 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "source": [ 41 | "## Step 1. Create tables used for deadlock simulation" 42 | ], 43 | "metadata": { 44 | "azdata_cell_guid": "bb860ada-c696-4961-9585-5748c32e566d" 45 | } 46 | }, 47 | { 48 | "cell_type": "code", 49 | "source": [ 50 | "-- ==================================\r\n", 51 | "-- Create the tables.\r\n", 52 | "-- ==================================\r\n", 53 | "\r\n", 54 | "CREATE TABLE [dbo].[Person](\r\n", 55 | " [Id] INT IDENTITY(1,1) NOT NULL,\r\n", 56 | " [FirstName] VARCHAR(20) NOT NULL,\r\n", 57 | " [BusinessEntityID] INT NOT NULL,\r\n", 58 | " CONSTRAINT [PK_Person] PRIMARY KEY CLUSTERED\r\n", 59 | " (\r\n", 60 | " [Id] ASC\r\n", 61 | " )\r\n", 62 | ")\r\n", 63 | "\r\n", 64 | "CREATE TABLE [dbo].[PersonPhone](\r\n", 65 | " [PhoneNumber] VARCHAR(50) NOT NULL,\r\n", 66 | " [BusinessEntityID] INT NOT NULL,\r\n", 67 | " CONSTRAINT [PK_PersonPhone] PRIMARY KEY CLUSTERED\r\n", 68 | " (\r\n", 69 | " [PhoneNumber] ASC\r\n", 70 | "))\r\n", 71 | "\r\n", 72 | "-- ==================================\r\n", 73 | "-- Add some rows\r\n", 74 | "-- ==================================\r\n", 75 | "\r\n", 76 | "INSERT INTO [PersonPhone] VALUES ( '999-555-1212',1);\r\n", 77 | "INSERT INTO [Person] VALUES ( 'Chris',1);\r\n", 78 | "\r\n", 79 | "-- ==================================" 80 | ], 81 | "metadata": { 82 | "azdata_cell_guid": "b107e24d-69fd-4489-ae5a-2c590fc90d75" 83 | }, 84 | "outputs": [ 85 | { 86 | "output_type": "display_data", 87 | "data": { 88 | "text/html": "(1 row affected)" 89 | }, 90 | "metadata": {} 91 | }, { 92 | "output_type": "display_data", 93 | "data": { 94 | "text/html": "(1 row affected)" 95 | }, 96 | "metadata": {} 97 | }, { 98 | "output_type": "display_data", 99 | "data": { 100 | "text/html": "Total execution time: 00:00:00.109" 101 | }, 102 | "metadata": {} 103 | } 104 | ], 105 | "execution_count": null 106 | }, 107 | { 108 | "cell_type": "markdown", 109 | "source": [ 110 | "## Step 2. 
Start deadlock simulation\r\n", 111 | "" 112 | ], 113 | "metadata": { 114 | "azdata_cell_guid": "48300021-4344-45d9-a2da-e5e89f013411" 115 | } 116 | }, 117 | { 118 | "cell_type": "code", 119 | "source": [ 120 | "SET LOCK_TIMEOUT -1;\n", 121 | "\n", 122 | "SELECT GETDATE() AS [Query1Start] \r\n", 123 | "\r\n", 124 | "BEGIN TRANSACTION\r\n", 125 | "\r\n", 126 | "UPDATE [PersonPhone] SET PhoneNumber = '999-555-1212' WHERE [BusinessEntityID] = 1\r\n", 127 | "WAITFOR DELAY '00:00:15'\r\n", 128 | "UPDATE [Person] SET [FirstName] = 'Chris' WHERE [BusinessEntityID] = 1\r\n", 129 | "\r\n", 130 | "ROLLBACK TRANSACTION\r\n", 131 | "\r\n", 132 | "SELECT GETDATE() AS [Query1End] " 133 | ], 134 | "metadata": { 135 | "azdata_cell_guid": "b05efdce-ccad-444b-9990-21f70583aba7" 136 | }, 137 | "outputs": [ 138 | { 139 | "output_type": "display_data", 140 | "data": { 141 | "text/html": "(1 row affected)" 142 | }, 143 | "metadata": {} 144 | }, { 145 | "output_type": "display_data", 146 | "data": { 147 | "text/html": "(1 row affected)" 148 | }, 149 | "metadata": {} 150 | }, { 151 | "output_type": "display_data", 152 | "data": { 153 | "text/html": "(1 row affected)" 154 | }, 155 | "metadata": {} 156 | }, { 157 | "output_type": "display_data", 158 | "data": { 159 | "text/html": "(1 row affected)" 160 | }, 161 | "metadata": {} 162 | }, { 163 | "output_type": "display_data", 164 | "data": { 165 | "text/html": "Total execution time: 00:00:18.500" 166 | }, 167 | "metadata": {} 168 | }, { 169 | "output_type": "execute_result", 170 | "metadata": {}, 171 | "execution_count": 2, 172 | "data": { 173 | "application/vnd.dataresource+json": { 174 | "schema": { 175 | "fields": [ 176 | { 177 | "name": "Query1Start" 178 | } 179 | ] 180 | }, 181 | "data": [ 182 | { 183 | "0": "2021-03-08 20:27:09.593" 184 | } 185 | ] 186 | }, 187 | "text/html": [ 188 | "", 189 | "", 190 | "", 191 | "
Query1Start
2021-03-08 20:27:09.593
" 192 | ] 193 | } 194 | }, { 195 | "output_type": "execute_result", 196 | "metadata": {}, 197 | "execution_count": 2, 198 | "data": { 199 | "application/vnd.dataresource+json": { 200 | "schema": { 201 | "fields": [ 202 | { 203 | "name": "Query1End" 204 | } 205 | ] 206 | }, 207 | "data": [ 208 | { 209 | "0": "2021-03-08 20:27:27.963" 210 | } 211 | ] 212 | }, 213 | "text/html": [ 214 | "", 215 | "", 216 | "", 217 | "
Query1End
2021-03-08 20:27:27.963
" 218 | ] 219 | } 220 | } 221 | ], 222 | "execution_count": null 223 | }, 224 | { 225 | "cell_type": "markdown", 226 | "source": [ 227 | "> Quickly jump to [Deadlock-2.ipynb](./Deadlock-2.ipynb) for **_Step 3_** and run it, before the above code cell finishes." 228 | ], 229 | "metadata": { 230 | "azdata_cell_guid": "c1f566a4-7365-4d55-a712-26f6fdb002e7" 231 | } 232 | }, 233 | { 234 | "cell_type": "markdown", 235 | "source": [ 236 | "## Step 4. Clean up tables" 237 | ], 238 | "metadata": { 239 | "azdata_cell_guid": "8a80c19b-54d9-4c3b-b8ba-5924d9fee30b" 240 | } 241 | }, 242 | { 243 | "cell_type": "code", 244 | "source": [ 245 | "DROP TABLE [dbo].[Person]\r\n", 246 | "DROP TABLE [dbo].[PersonPhone]" 247 | ], 248 | "metadata": { 249 | "azdata_cell_guid": "a68e2330-3682-4dfb-bce6-118611b1aee3" 250 | }, 251 | "outputs": [], 252 | "execution_count": null 253 | } 254 | ] 255 | } -------------------------------------------------------------------------------- /Simple Demo/Troubleshooting/Deadlock/Deadlock-2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# This is the second part of Deadlock simulation\r\n", 20 | "\r\n", 21 | "> **Note:** You'll need to have run [Deadlock-1.ipynb](./Deadlock-1.ipynb#step-2-start-deadlock-simulation) as part of this deadlock simluation before.\r\n", 22 | "\r\n", 23 | "## Step 3. Run this quickly after Step 2 is done" 24 | ], 25 | "metadata": { 26 | "azdata_cell_guid": "b57dad6d-77cf-4bb7-a104-0e5cae7a90dc" 27 | } 28 | }, 29 | { 30 | "cell_type": "code", 31 | "source": [ 32 | "SET LOCK_TIMEOUT -1;\n", 33 | "\r\n", 34 | "BEGIN TRANSACTION\r\n", 35 | "\r\n", 36 | "UPDATE [Person] SET [FirstName] = 'Chris' WHERE [BusinessEntityID] = 1\r\n", 37 | "\r\n", 38 | "UPDATE [PersonPhone] SET PhoneNumber = '999-555-1212' WHERE [BusinessEntityID] = 1\r\n", 39 | "\r\n", 40 | "WAITFOR DELAY '00:00:03'\r\n", 41 | "\r\n", 42 | "\r\n", 43 | "" 44 | ], 45 | "metadata": { 46 | "azdata_cell_guid": "19ce1fd5-b95c-4a65-a2dc-546ad8e8f227", 47 | "tags": [] 48 | }, 49 | "outputs": [ 50 | { 51 | "output_type": "display_data", 52 | "data": { 53 | "text/html": "(1 row affected)" 54 | }, 55 | "metadata": {} 56 | }, { 57 | "output_type": "error", 58 | "evalue": "Msg 1205, Level 13, State 51, Line 7\r\nTransaction (Process ID 77) was deadlocked on lock resources with another process and has been chosen as the deadlock victim. 
Rerun the transaction.", 59 | "ename": "", 60 | "traceback": [] 61 | }, { 62 | "output_type": "display_data", 63 | "data": { 64 | "text/html": "Total execution time: 00:00:15.545" 65 | }, 66 | "metadata": {} 67 | } 68 | ], 69 | "execution_count": 7 70 | }, 71 | { 72 | "cell_type": "code", 73 | "source": [ 74 | "COMMIT TRAN" 75 | ], 76 | "metadata": { 77 | "azdata_cell_guid": "bc465cdc-6ff4-4a35-b727-338853353da5" 78 | }, 79 | "outputs": [ 80 | { 81 | "output_type": "error", 82 | "evalue": "Msg 3902, Level 16, State 1, Line 1\r\nThe COMMIT TRANSACTION request has no corresponding BEGIN TRANSACTION.", 83 | "ename": "", 84 | "traceback": [] 85 | }, { 86 | "output_type": "display_data", 87 | "data": { 88 | "text/html": "Total execution time: 00:00:00.031" 89 | }, 90 | "metadata": {} 91 | } 92 | ], 93 | "execution_count": 8 94 | } 95 | ] 96 | } -------------------------------------------------------------------------------- /Useful Notebooks/ADSKeyboardShortcuts.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Azure Data Studio Keyboard shortcuts cheatsheet\r\n", 20 | "\r\n", 21 | "These are my favorite Azure Data Studio shortcuts :) \r\n", 22 | "\r\n", 23 | "#### Useful when Navigating ADS\r\n", 24 | "\r\n", 25 | "| Keyboard shortcuts | Description |\r\n", 26 | "| :-- | :-- |\r\n", 27 | "| `Ctrl` + `Shift` + `P` | To launch a command pallette to execute other commands, such as *Developer Reload Window* | \r\n", 28 | "| `Ctrl` + `P` | To quickly look up and open a file based on some characters in the file name that are on the folder opened |\r\n", 29 | "| `Ctrl` + `K` + `S` | Look up Keyboard shortcuts |\r\n", 30 | "| `Ctrl` + ` | Open terminal |\r\n", 31 | "\r\n", 32 | "#### Useful when Editing Notebooks\r\n", 33 | "\r\n", 34 | "| Keyboard shortcuts | Description |\r\n", 35 | "| :-- | :-- |\r\n", 36 | "| `Esc` | To quickly exit out of edited notebook |\r\n", 37 | "| `Alt` + `Z` | Toggle wordwrap when editing markdowon / query editor |\r\n", 38 | "| `Ctrl` + `S` | Save file |\r\n", 39 | "| `Ctrl` + `Z` | Undo, while on edit mode in notebook| \r\n", 40 | "| `Ctrl` + `Y` | Redo, While on edit mode in notebook | \r\n", 41 | "| `Ctrl` + `\\` | Split window |\r\n", 42 | "| `↑` or `↓` or `Home` or `End` | Go up and down Notebook |\r\n", 43 | "| `Ctrl` + `+` | Zoom in | \r\n", 44 | "| `Ctrl` + `-` | Zoom out | \r\n", 45 | "| `Ctrl` + `0` | Reset | \r\n", 46 | "\r\n", 47 | "#### Activity Bar shortcuts\r\n", 48 | "\r\n", 49 | "| Keyboard shortcuts | Description |\r\n", 50 | "| :-- | :-- |\r\n", 51 | "| `Ctrl` + `Shift` + `D` | To launch the **Connections Viewlet** where you can see browse ADX clusters and other SQL servers |\r\n", 52 | "| `Ctrl` + `Shift` + `F` | To launch the **Search Viewlet** where you can search for files |\r\n", 53 | "| `Ctrl` + `Shift` + `E` | To launch the **Explorer Viewlet** where you can see Opened Files, Folder / workspace file structure, and Timeline |\r\n", 54 | "| `Ctrl` + `Shift` + `G` | To launch the **Source Control Viewlet** where you can view list of files changed and do manual git actions (outside of SimplePR) |\r\n", 55 | "| `Ctrl` + `Shift` + `X` | To launch the **Extensions Viewlet** where you can view a list of 
extensions, install the from market gallery or install vsix files from your local folder |\r\n", 56 | "\r\n", 57 | "\r\n", 58 | "### Bonus: Where are my opened files?\r\n", 59 | "\r\n", 60 | "When you have many files opened, you can use \r\n", 61 | "* `Ctrl` + `P` to see all opened files.\r\n", 62 | "* `Ctrl` + `Shift` + `E` to launch the Explorer extension to see the **Opened Files** tab.\r\n", 63 | "" 64 | ], 65 | "metadata": { 66 | "azdata_cell_guid": "9ca7ac0f-e36c-4e7d-b3e9-c81166a60efc" 67 | } 68 | } 69 | ] 70 | } -------------------------------------------------------------------------------- /Useful Notebooks/ADSMarkdownCheatsheet.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "SQL", 5 | "display_name": "SQL", 6 | "language": "sql" 7 | }, 8 | "language_info": { 9 | "name": "sql", 10 | "version": "" 11 | } 12 | }, 13 | "nbformat_minor": 2, 14 | "nbformat": 4, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "source": [ 19 | "# Markdown Cheatsheet\r\n", 20 | "\r\n", 21 | "I'm not sure who created this cheatsheet but it's been used quite a bit in my team :D \r\n", 22 | "\r\n", 23 | "![](../Simple%20Demo/Sample%20Notebooks%20-%20Data%20Analysis/images/markdown.png)\r\n", 24 | "\r\n", 25 | "\r\n", 26 | "\r\n", 27 | "> **Trivia**: Did you know that Content Writers of Microsoft Docs use markdown language all up to provide \r\n", 28 | "instructions to external customers on how to use products that we ship such as \r\n", 29 | "[SQL Server](https://docs.microsoft.com/sql/sql-server/what-s-new-in-sql-server-ver15), \r\n", 30 | "[Azure SQL](https://docs.microsoft.com/sql/azure-data-studio/what-is-azure-data-studio), and \r\n", 31 | "[Azure Data Studio](http://aka.ms/AzureDataStudio)?\r\n", 32 | "\r\n", 33 | "#### Table creation\r\n", 34 | "This is a table example when using markdown. \r\n", 35 | "> **Tip**: Only two dashes ( `--` ) are needed to form a table. \r\n", 36 | "\r\n", 37 | "```\r\n", 38 | "| Tables | Are | Cool |\r\n", 39 | "|----------|:-------------:|------:|\r\n", 40 | "| col 1 is | left-aligned | 1600 |\r\n", 41 | "| col 2 is | centered | 12 |\r\n", 42 | "| col 3 is | right-aligned | 1 |\r\n", 43 | "```\r\n", 44 | "\r\n", 45 | "Will look like this:\r\n", 46 | "\r\n", 47 | "| Tables | Are | Cool |\r\n", 48 | "|----------|:-------------:|------:|\r\n", 49 | "| col 1 is | left-aligned | 1600 |\r\n", 50 | "| col 2 is | centered | 12 |\r\n", 51 | "| col 3 is | right-aligned | 1 |\r\n", 52 | "\r\n", 53 | "\r\n", 54 | "#### Complex list example\r\n", 55 | "1. First level part 1\r\n", 56 | " 1. sub ordered list item 1 \r\n", 57 | " * hanging bullet level 1\r\n", 58 | " * hanging bullet level 2\r\n", 59 | " 2. sub ordered list item 2\r\n", 60 | " 1. use number instead \r\n", 61 | "2. First level part 2\r\n", 62 | "\r\n", 63 | "\r\n", 64 | "> **Tip**: To break a paragraph into a new line, add two empty spaces ` `. You do not need this when breaking for a new ordered list item or a new unordered list item. 
\r\n", 65 | "\r\n", 66 | "Markdown with two empty spaces after \"Line 1\": \r\n", 67 | "```\r\n", 68 | "Line 1 \r\n", 69 | "Line 2\r\n", 70 | "```\r\n", 71 | "\r\n", 72 | "What it looks like: \r\n", 73 | "Line 1 \r\n", 74 | "Line 2" 75 | ], 76 | "metadata": { 77 | "azdata_cell_guid": "229f7c5c-dba5-4453-9067-736799696f29" 78 | } 79 | } 80 | ] 81 | } -------------------------------------------------------------------------------- /Useful Notebooks/Using_ConvertTo-SQLNoteBook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "kernelspec": { 4 | "name": "powershell", 5 | "display_name": "PowerShell" 6 | }, 7 | "language_info": { 8 | "name": "powershell", 9 | "codemirror_mode": "shell", 10 | "mimetype": "text/x-sh", 11 | "file_extension": ".ps1" 12 | } 13 | }, 14 | "nbformat_minor": 2, 15 | "nbformat": 4, 16 | "cells": [ 17 | { 18 | "cell_type": "markdown", 19 | "source": [ 20 | " ➕\r\n", 21 | " ➕\r\n", 22 | " = ❤\r\n", 23 | "# PowerShell to convert .SQL files into Notebooks" 24 | ], 25 | "metadata": { 26 | "azdata_cell_guid": "42ce5c87-0ea5-429e-9d0a-6450eadb2a4d" 27 | } 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "source": [ 32 | "```\r\n", 33 | "Shared by: Aaron Nelson.\\\r\n", 34 | "Modified by: Julie Koesmarno.\r\n", 35 | "```\r\n", 36 | "\r\n", 37 | "(optional step, for demo purposes)\r\n", 38 | "## Make a directory to store some .SQL files" 39 | ], 40 | "metadata": { 41 | "azdata_cell_guid": "9130e7bd-c976-4fe9-94db-7c26eb1360dd" 42 | } 43 | }, 44 | { 45 | "cell_type": "code", 46 | "source": [ 47 | "mkdir c:\\temp\\SQLFiles" 48 | ], 49 | "metadata": { 50 | "azdata_cell_guid": "faf204dc-5a96-4c46-b4aa-38b10ab49037" 51 | }, 52 | "outputs": [ 53 | { 54 | "output_type": "stream", 55 | "name": "stdout", 56 | "text": "\n\n Directory: C:\\temp\n\n\nMode LastWriteTime Length Name \n---- ------------- ------ ---- \nd----- 8/28/2020 6:13 PM SQLFiles \n\n\n" 57 | } 58 | ], 59 | "execution_count": 2 60 | }, 61 | { 62 | "cell_type": "markdown", 63 | "source": [ 64 | "Switch to a folder where you have a lot of `.SQL` files." 
65 | ], 66 | "metadata": { 67 | "azdata_cell_guid": "d5be3b3d-a666-4df1-b487-da0ff4020f05" 68 | } 69 | }, 70 | { 71 | "cell_type": "code", 72 | "source": [ 73 | "cd c:\\temp\\SQLFiles" 74 | ], 75 | "metadata": { 76 | "azdata_cell_guid": "399042e8-4824-4ec7-a605-1047e0b1a4ba" 77 | }, 78 | "outputs": [ 79 | { 80 | "output_type": "stream", 81 | "name": "stdout", 82 | "text": "" 83 | } 84 | ], 85 | "execution_count": 3 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "source": [ 90 | "If you don't have any .SQL files handy, download some from GitHub\r\n", 91 | "(use the step below.)" 92 | ], 93 | "metadata": { 94 | "azdata_cell_guid": "de0cdacb-d1f3-4237-b834-64de0e2a10bb" 95 | } 96 | }, 97 | { 98 | "cell_type": "code", 99 | "source": [ 100 | "\r\n", 101 | "irm https://gist.githubusercontent.com/MsSQLGirl/799d3613c6b3aba58cb4decbb30da139/raw/433ffdcefcbc4db0e5f5c9b53e1e9bde139f885d/SQLSample_01_ServerProperties.sql > '.\\SQLSample_01_ServerProperties.sql'\r\n", 102 | "irm https://gist.githubusercontent.com/MsSQLGirl/799d3613c6b3aba58cb4decbb30da139/raw/433ffdcefcbc4db0e5f5c9b53e1e9bde139f885d/SQLSample_02_WWI.sql > '.\\SQLSample_02_WWI.sql'\r\n", 103 | "irm https://gist.githubusercontent.com/MsSQLGirl/799d3613c6b3aba58cb4decbb30da139/raw/433ffdcefcbc4db0e5f5c9b53e1e9bde139f885d/SQLSample_03_StringDynamics.sql > '.\\SQLSample_03_StringDynamics.sql'\r\n", 104 | "irm https://gist.githubusercontent.com/MsSQLGirl/799d3613c6b3aba58cb4decbb30da139/raw/433ffdcefcbc4db0e5f5c9b53e1e9bde139f885d/SQLSample_04_VariableBatchConundrum.sql > '.\\SQLSample_04_VariableBatchConundrum.sql'\r\n", 105 | "irm https://gist.githubusercontent.com/vickyharp/d188b5ab2ceec12896b4a514ea52e5b6/raw/f2e4b1bc4d6a2fb293aebb9989129bd722d6a25e/AdventureWorksAddress.sql > '.\\AdventureWorksAddress.sql'\r\n", 106 | "irm https://gist.githubusercontent.com/vickyharp/6c254d63d3de9850b20b5861b061b5f5/raw/0ff7d7c5da9f216fb7534994c8be60fe0e7efaf3/AdventureWorksMultiStatementSBatch.sql > '.\\AdventureWorksMultiStatementSBatch.sql'\r\n", 107 | "irm https://raw.githubusercontent.com/microsoft/tigertoolbox/master/BPCheck/Check_BP_Servers.sql > '.\\Check_BP_Servers.sql'\r\n", 108 | "" 109 | ], 110 | "metadata": { 111 | "azdata_cell_guid": "a84484aa-d595-49f7-91b3-8d6e1a66c739" 112 | }, 113 | "outputs": [ 114 | { 115 | "output_type": "stream", 116 | "name": "stdout", 117 | "text": "" 118 | } 119 | ], 120 | "execution_count": 4 121 | }, 122 | { 123 | "cell_type": "markdown", 124 | "source": [ 125 | "## Here's the part where it gets good!\r\n", 126 | "\r\n", 127 | "Now use `dir` to loop over all the .SQL files in the directory, and use the `ConvertTo-SQLNoteBook` function to turn them into SQL Notebooks." 
128 | ], 129 | "metadata": { 130 | "azdata_cell_guid": "57307f7e-a897-4d9a-801e-f6b9396a6690" 131 | } 132 | }, 133 | { 134 | "cell_type": "code", 135 | "source": [ 136 | "Get-ChildItem -Recurse *.SQL | \r\n", 137 | "foreach {\r\n", 138 | " ConvertTo-SQLNoteBook -InputFileName $_.FullName -OutputNotebookName (Join-Path -Path (Split-Path -Path $_.FullName -Parent) -ChildPath ($_.Name -replace '.sql', '.ipynb'))\r\n", 139 | "}" 140 | ], 141 | "metadata": { 142 | "azdata_cell_guid": "5ebfab75-513b-4a66-a1db-dc38f43d82ce" 143 | }, 144 | "outputs": [ 145 | { 146 | "output_type": "stream", 147 | "name": "stdout", 148 | "text": "" 149 | } 150 | ], 151 | "execution_count": 5 152 | }, 153 | { 154 | "cell_type": "code", 155 | "source": [ 156 | "Get-ChildItem -Recurse *.SQL" 157 | ], 158 | "metadata": { 159 | "azdata_cell_guid": "e2ec8db5-6ac4-4eba-b4f3-0f88be7ebd14" 160 | }, 161 | "outputs": [], 162 | "execution_count": null 163 | }, 164 | { 165 | "cell_type": "markdown", 166 | "source": [ 167 | "Check inside that same directory, and you should now see a bunch of .IPYNB files." 168 | ], 169 | "metadata": { 170 | "azdata_cell_guid": "bec32cbd-7593-4ff5-a64f-a360647058c2" 171 | } 172 | }, 173 | { 174 | "cell_type": "code", 175 | "source": [ 176 | "dir -Filter *.ipynb" 177 | ], 178 | "metadata": { 179 | "azdata_cell_guid": "cc62ccf7-3ad0-4c9e-8490-8894017be4e4" 180 | }, 181 | "outputs": [ 182 | { 183 | "output_type": "stream", 184 | "name": "stdout", 185 | "text": "\n\n Directory: C:\\temp\\SQLFiles\n\n\nMode LastWriteTime Length Name \n---- ------------- ------ ---- \n-a---- 8/28/2020 6:14 PM 1933 AdventureWorksAddress.ipynb \n-a---- 8/28/2020 6:14 PM 4051 AdventureWorksMultiStatementSBatch.ipynb \n-a---- 8/28/2020 6:14 PM 1032013 Check_BP_Servers.ipynb \n-a---- 8/28/2020 6:14 PM 2581 SQLSample_01_ServerProperties.ipynb \n-a---- 8/28/2020 6:14 PM 5513 SQLSample_02_WWI.ipynb \n-a---- 8/28/2020 6:14 PM 4026 SQLSample_03_StringDynamics.ipynb \n-a---- 8/28/2020 6:14 PM 1853 SQLSample_04_VariableBatchConundrum.ipynb \n\n\n" 186 | } 187 | ], 188 | "execution_count": 6 189 | }, 190 | { 191 | "cell_type": "markdown", 192 | "source": [ 193 | "Now let's have a look at one of the Notebooks we just built. Grab one and open it up in Azure Data Studio." 194 | ], 195 | "metadata": { 196 | "azdata_cell_guid": "4cbaa36c-82cf-42ea-a900-18dfdea860ae" 197 | } 198 | } 199 | ] 200 | } --------------------------------------------------------------------------------
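As a small variation on the conversion loop in the notebook above, the sketch below only converts `.SQL` files that do not already have a matching `.ipynb` next to them. This is not part of the original notebook; it assumes `ConvertTo-SQLNoteBook` is available in the session (for example from Aaron Nelson's PowerShellNotebook module) and that the files live under `c:\temp\SQLFiles` as in the demo.

```
# Hedged sketch: skip .SQL files that already have a notebook counterpart.
# Assumes ConvertTo-SQLNoteBook is loaded (e.g. Install-Module PowerShellNotebook).
Get-ChildItem -Path 'c:\temp\SQLFiles' -Recurse -Filter *.sql |
    ForEach-Object {
        # Build the target notebook path next to the source .SQL file
        $notebookPath = Join-Path -Path $_.DirectoryName -ChildPath ($_.BaseName + '.ipynb')

        if (Test-Path $notebookPath) {
            Write-Host "Skipping $($_.Name) - notebook already exists."
        }
        else {
            ConvertTo-SQLNoteBook -InputFileName $_.FullName -OutputNotebookName $notebookPath
        }
    }
```

This keeps reruns idempotent: already-converted files are left alone, and only new `.SQL` files produce fresh notebooks.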