├── .devcontainer
└── devcontainer.json
├── .gitattributes
├── .github
├── CODE_OF_CONDUCT.md
├── ISSUE_TEMPLATE.md
├── PULL_REQUEST_TEMPLATE.md
└── dependabot.yml
├── .gitignore
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── SECURITY.md
├── architecture
├── architecture.py
└── nlp_to_sql_architecture.png
├── azure.yaml
├── infra
├── abbreviations.json
├── azuredeploy.json
├── main.bicep
├── main.parameters.json
└── modules
│ ├── openai.bicep
│ ├── speech.bicep
│ └── sql.bicep
├── media
├── ai-in-a-box.png
└── banner-nlp-to-sql-in-a-box.png
└── src
├── __init__.py
├── app.py
├── database
├── __init__.py
├── service.py
└── utils.py
├── kernel
├── __init__.py
└── service.py
├── orchestrator
├── __init__.py
└── service.py
├── plugins
├── database_plugin.py
└── nlp_to_sql
│ ├── config.json
│ └── skprompt.txt
├── requirements.txt
└── speech
├── __init__.py
└── service.py
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the
2 | // README at: https://github.com/devcontainers/templates/tree/main/src/python
3 | {
4 | "name": "Python 3",
5 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
6 | "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
7 | "features": {
8 | "ghcr.io/devcontainers/features/azure-cli:1": {},
9 | "ghcr.io/azure/azure-dev/azd:0": {}
10 | }
11 |
12 | // Features to add to the dev container. More info: https://containers.dev/features.
13 | // "features": {},
14 |
15 | // Use 'forwardPorts' to make a list of ports inside the container available locally.
16 | // "forwardPorts": [],
17 |
18 | // Use 'postCreateCommand' to run commands after the container is created.
19 | // "postCreateCommand": "pip3 install --user -r requirements.txt",
20 |
21 | // Configure tool-specific properties.
22 | // "customizations": {},
23 |
24 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
25 | // "remoteUser": "root"
26 | }
27 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto eol=lf
2 | *.{cmd,[cC][mM][dD]} text eol=crlf
3 | *.{bat,[bB][aA][tT]} text eol=crlf
--------------------------------------------------------------------------------
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Microsoft Open Source Code of Conduct
2 |
3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4 |
5 | Resources:
6 |
7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
10 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
4 | > Please provide us with the following information:
5 | > ---------------------------------------------------------------
6 |
7 | ### This issue is for a: (mark with an `x`)
8 | ```
9 | - [ ] bug report -> please search issues before submitting
10 | - [ ] feature request
11 | - [ ] documentation issue or request
12 | - [ ] regression (a behavior that used to work and stopped in a new release)
13 | ```
14 |
15 | ### Minimal steps to reproduce
16 | >
17 |
18 | ### Any log messages given by the failure
19 | >
20 |
21 | ### Expected/desired behavior
22 | >
23 |
24 | ### OS and Version?
25 | > Windows 7, 8 or 10. Linux (which distribution). macOS (Yosemite? El Capitan? Sierra?)
26 |
27 | ### Versions
28 | >
29 |
30 | ### Mention any other details that might be useful
31 |
32 | > ---------------------------------------------------------------
33 | > Thanks! We'll be in touch soon.
34 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Purpose
2 |
3 | * ...
4 |
5 | ## Does this introduce a breaking change?
6 |
7 | ```
8 | [ ] Yes
9 | [ ] No
10 | ```
11 |
12 | ## Pull Request Type
13 | What kind of change does this Pull Request introduce?
14 |
15 |
16 | ```
17 | [ ] Bugfix
18 | [ ] Feature
19 | [ ] Code style update (formatting, local variables)
20 | [ ] Refactoring (no functional changes, no api changes)
21 | [ ] Documentation content changes
22 | [ ] Other... Please describe:
23 | ```
24 |
25 | ## How to Test
26 | * Get the code
27 |
28 | ```
29 | git clone [repo-address]
30 | cd [repo-name]
31 | git checkout [branch-name]
32 | npm install
33 | ```
34 |
35 | * Test the code
36 |
37 | ```
38 | ```
39 |
40 | ## What to Check
41 | Verify that the following are valid
42 | * ...
43 |
44 | ## Other Information
45 |
46 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for more information:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 | # https://containers.dev/guide/dependabot
6 |
7 | version: 2
8 | updates:
9 | - package-ecosystem: "devcontainers"
10 | directory: "/"
11 | schedule:
12 | interval: weekly
13 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .azure
2 | # Byte-compiled / optimized / DLL files
3 | __pycache__/
4 | *.py[cod]
5 | *$py.class
6 |
7 | # C extensions
8 | *.so
9 |
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | cover/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # poetry
99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
100 | # This is especially recommended for binary packages to ensure reproducibility, and is more
101 | # commonly ignored for libraries.
102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
103 | #poetry.lock
104 |
105 | # pdm
106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
107 | #pdm.lock
108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
109 | # in version control.
110 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
111 | .pdm.toml
112 | .pdm-python
113 | .pdm-build/
114 |
115 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
116 | __pypackages__/
117 |
118 | # Celery stuff
119 | celerybeat-schedule
120 | celerybeat.pid
121 |
122 | # SageMath parsed files
123 | *.sage.py
124 |
125 | # Environments
126 | .env
127 | .venv
128 | env/
129 | venv/
130 | ENV/
131 | env.bak/
132 | venv.bak/
133 |
134 | # Spyder project settings
135 | .spyderproject
136 | .spyproject
137 |
138 | # Rope project settings
139 | .ropeproject
140 |
141 | # mkdocs documentation
142 | /site
143 |
144 | # mypy
145 | .mypy_cache/
146 | .dmypy.json
147 | dmypy.json
148 |
149 | # Pyre type checker
150 | .pyre/
151 |
152 | # pytype static type analyzer
153 | .pytype/
154 |
155 | # Cython debug symbols
156 | cython_debug/
157 |
158 | # PyCharm
159 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
160 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
161 | # and can be added to the global gitignore or merged into this file. For a more nuclear
162 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
163 | .idea/
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to Azure-Samples/nlp-sql-in-a-box
2 |
3 | This project welcomes contributions and suggestions. Most contributions require you to agree to a
4 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
5 | the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
6 |
7 | When you submit a pull request, a CLA bot will automatically determine whether you need to provide
8 | a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
9 | provided by the bot. You will only need to do this once across all repos using our CLA.
10 |
11 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
12 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
13 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
14 |
15 | - [Code of Conduct](#coc)
16 | - [Issues and Bugs](#issue)
17 | - [Feature Requests](#feature)
18 | - [Submission Guidelines](#submit)
19 |
20 | ## Code of Conduct
21 | Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
22 |
23 | ## Found an Issue?
24 | If you find a bug in the source code or a mistake in the documentation, you can help us by
25 | [submitting an issue](#submit-issue) to the GitHub Repository. Even better, you can
26 | [submit a Pull Request](#submit-pr) with a fix.
27 |
28 | ## Want a Feature?
29 | You can *request* a new feature by [submitting an issue](#submit-issue) to the GitHub
30 | Repository. If you would like to *implement* a new feature, please submit an issue with
31 | a proposal for your work first, to be sure that we can use it.
32 |
33 | * **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
34 |
35 | ## Submission Guidelines
36 |
37 | ### Submitting an Issue
38 | Before you submit an issue, search the archive, maybe your question was already answered.
39 |
40 | If your issue appears to be a bug, and hasn't been reported, open a new issue.
41 | Help us to maximize the effort we can spend fixing issues and adding new
42 | features, by not reporting duplicate issues. Providing the following information will increase the
43 | chances of your issue being dealt with quickly:
44 |
45 | * **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps
46 | * **Version** - what version is affected (e.g. 0.1.2)
47 | * **Motivation for or Use Case** - explain what are you trying to do and why the current behavior is a bug for you
48 | * **Browsers and Operating System** - is this a problem with all browsers?
49 | * **Reproduce the Error** - provide a live example or a unambiguous set of steps
50 | * **Related Issues** - has a similar issue been reported before?
51 | * **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be
52 | causing the problem (line of code or commit)
53 |
54 | You can file new issues by providing the above information at the corresponding repository's issues link: https://github.com/Azure-Samples/nlp-sql-in-a-box/issues/new.
55 |
56 | ### Submitting a Pull Request (PR)
57 | Before you submit your Pull Request (PR) consider the following guidelines:
58 |
59 | * Search the repository (https://github.com/Azure-Samples/nlp-sql-in-a-box/pulls) for an open or closed PR
60 | that relates to your submission. You don't want to duplicate effort.
61 |
62 | * Make your changes in a new git fork:
63 |
64 | * Commit your changes using a descriptive commit message
65 | * Push your fork to GitHub:
66 | * In GitHub, create a pull request
67 | * If we suggest changes then:
68 | * Make the required updates.
69 | * Rebase your fork and force push to your GitHub repository (this will update your Pull Request):
70 |
71 | ```shell
72 | git rebase master -i
73 | git push -f
74 | ```
75 |
76 | That's it! Thank you for your contribution!
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Chris Ayers
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_type: sample
3 | languages:
4 | - azdeveloper
5 | - powershell
6 | - bicep
7 | - python
8 | products:
9 | - azure
10 | - ai-services
11 | - azure-openai
12 | - azure-speech
13 | - azure-sql-database
14 | urlFragment: nlp-sql-in-a-box
15 | name: NLP to SQL Chatbot in-a-box (AI-in-a-Box) that enables natural-language querying of Azure SQL databases with Azure OpenAI, Semantic Kernel, and Azure AI Speech Service
16 | description: Enables users to interact with SQL databases using natural language and speech, leveraging Azure OpenAI, Semantic Kernel, and Azure AI Speech Service to translate spoken queries into SQL statements, execute them, and deliver results audibly, ensuring an intuitive and user-friendly experience.
17 | ---
18 |
19 |
20 | # NLP-SQL-in-a-Box
21 |
22 | |||
23 | |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| ---:|
24 | | This solution is part of the AI-in-a-Box framework developed by the team of Microsoft Customer Engineers and Architects to accelerate the deployment of AI and ML solutions. Our goal is to simplify the adoption of AI technologies by providing ready-to-use accelerators that ensure quality, efficiency, and rapid deployment.|  |
25 |
26 | ## User Story
27 |
28 | Build a cutting-edge speech-enabled SQL query system using Azure Open AI, Semantic Kernel, and Azure AI Speech Service
29 |
30 | We will use the power of Azure Open AI and Semantic Kernel to translate your natural language queries into SQL statements that can be executed against an SQL Server database. This will allow you to interact with your data in a more intuitive and user-friendly way. No more struggling with complex SQL syntax – just speak your query and let the system do the rest!
31 |
32 | And with Azure Speech Services, we will convert your speech into text and synthesize the results as speech. This means that you can hear the results of your query spoken back to you, making it easier to understand and digest the information.
33 |
34 | 
35 |
36 | ## What's in the Box
37 |
38 |
39 | - Python application that leverages [Semantic Kernel](https://learn.microsoft.com/en-us/semantic-kernel/overview/) and [Speech Services](https://azure.microsoft.com/en-us/products/ai-services/ai-speech) to build a chatbot that can:
40 | - Understand natural language database queries from speech
41 | - Translate them into SQL
42 | - Execute the SQL against an SQL Server database
43 | - Return the results as speech
44 | - Deployment templates of all resources needed, which includes:
45 | - [OpenAI Service and Deployment](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
46 | - [Speech Services](https://azure.microsoft.com/en-us/products/ai-services/ai-speech)
47 | - [SQL Server](https://azure.microsoft.com/en-us/products/azure-sql/database/)
48 | - Resources are deployed and used with security best practices in mind
49 | - Speech and OpenAI services do not allow api keys access
50 | - SQL Server requires Active Directory authentication
51 | - Required RBAC roles are assigned to the user deploying the solution
52 | - Application connects to all services using azure credential
53 |
54 | This solution was adapted from the [Revolutionizing SQL Queries with Azure Open AI and Semantic Kernel](https://techcommunity.microsoft.com/t5/analytics-on-azure-blog/revolutionizing-sql-queries-with-azure-open-ai-and-semantic/ba-p/3913513) blog post.
55 |
56 | ## Thinking Outside the Box
57 | This solution can be adapted for many other use cases. Here are some ideas:
58 |
59 | - Update the nlp_to_sql plugin to support more complex queries (including updates, deletes, etc.)
60 | - Add more plugins to the semantic kernel to support additional use cases
61 | - Add other options to interact with the kernel (e.g., a web interface, a mobile app, etc.)
62 |
63 | ## Deploy the Solution
64 |
65 | ### Deploy Pre-requisites
66 | 1. An [Azure subscription](https://azure.microsoft.com/en-us/free/)
67 | 2. Install [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli-windows?view=azure-cli-latest)
68 | 3. Install [Bicep](https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/install)
69 | 4. Install [Azure Developer CLI](https://learn.microsoft.com/en-us/azure/developer/azure-developer-cli/install-azd)
70 |
71 | ### UI Deploy
72 |
73 | [](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure-Samples%2Fnlp-sql-in-a-box%2Fmain%2Finfra%2Fazuredeploy.json)
74 |
75 | #### Required Input Parameters
76 | The parameters below are required in order to deploy the infrastructure.
77 | - Subscription
78 | - Region
79 | - Environment Name
80 | - Principal Id
81 | - You can find this by running the following command:
82 | ```bash
83 | az ad signed-in-user show --query id -o tsv
84 | ```
85 | - Administrator Login
86 |
87 | #### Optional Input Parameters
88 | - IP Address
89 | - If you want to allow your IP address to access the SQL Server, you can provide it here.
90 |
91 | #### Output Parameters
92 | After the deployment is complete, you can find the output parameters by clicking on the `Outputs` tab.
93 | You need to create an `.env` file in the root of the project and fill it with the output parameters. The `.env` file should look like this:
94 | ```bash
95 | AZURE_LOCATION=""
96 | AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=""
97 | AZURE_OPENAI_ENDPOINT=""
98 | SPEECH_SERVICE_ID=""
99 | SQL_SERVER_NAME = ""
100 | SQL_DATABASE_NAME = ""
101 | ```
102 |
103 | Note: whenever the Bicep files are changed, the `azuredeploy.json` file must be updated. To do this, run the following command:
104 |
105 | ```bash
106 | az bicep build --file infra/main.bicep --outfile infra/azuredeploy.json
107 | ```
108 |
109 | ### Azd deploy
110 | 1. Clone this repository locally
111 |
112 | `git clone https://github.com/Azure-Samples/nlp-sql-in-a-box/`
113 | 2. Deploy resources
114 |
115 | `az login`
116 |
117 | `azd auth login`
118 |
119 | `azd up`
120 |
121 | You will be prompted for:
122 | - environment name
123 | - azure subscription
124 | - azure region (we suggest using `eastus2`)
125 | - database administrator login
126 |
127 | When you deploy using this method, the `.env` file will be created automatically with the output parameters.
128 |
129 | ### Clean up
130 | To remove all resources created by this solution, run:
131 |
132 | `azd down`
133 |
134 | ## Run the Solution
135 |
136 | ### Run Pre-requisites
137 | 1. Install Python 3.10
138 | 2. Install [ODBC Driver for SQL Server](https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server)
139 | 3. Make sure you can access the resources deployed from your local machine.
140 | - By default, all resources were created with no public access.
141 | - You can allow your own IP address to access the resources by:
142 |     - Find out what your IPv4 address is
143 |     - `azd env set IP_ADDRESS <your-ipv4-address>`
144 | - `azd up`
145 | 4. Install requirements
146 |
147 | `pip install -r src/requirements.txt`
148 |
149 | ### Run Locally
150 |
151 | `python -m src.app`
152 |
153 | The first time you run the application, it will create and populate the database with fake data. This process may take a few minutes.
154 |
155 | #### Logging
156 | The application will output logs to the `app.log` file, so you can use it to better understand what's happening.
157 | If you need more information, you can change the log level to DEBUG in the `app.py` file:
158 | ```python
159 | logging.basicConfig(
160 | filename="app.log",
161 | format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
162 | datefmt="%Y-%m-%d %H:%M:%S",
163 | level=logging.DEBUG,
164 | )
165 | ```
166 |
167 | ### Example Usage
168 | Below you can see an example of the solution in action:
169 |
170 | ````
171 | $ python -m src.app
172 | Listening:
173 | User > How many locations are there?
174 | tool plugins-nlp_to_sql needs to be called with parameters {}
175 | tool plugins-nlp_to_sql called and returned There are `1` tool call arguments required and only `0` received. The required arguments are: ['input']. Please provide the required arguments and try again.
176 | tool plugins-nlp_to_sql needs to be called with parameters {"input":"How many locations are there?"}
177 | tool plugins-nlp_to_sql called and returned ```sql
178 | SELECT COUNT(DISTINCT Location) AS NumberOfLocations FROM ExplorationProduction;
179 | ```
180 | tool plugins-query needs to be called with parameters {"query":"SELECT COUNT(DISTINCT Location) AS NumberOfLocations FROM ExplorationProduction;"}
181 | tool plugins-query called and returned (1000,)
182 | Assistant > There are 1000 distinct locations.
183 | Listening:
184 | User > Yes.
185 | Listening:
186 | User > Can you list me the top five locations by production volume?
187 | tool plugins-nlp_to_sql needs to be called with parameters {"input":"biggest five locations by production volume"}
188 | tool plugins-nlp_to_sql called and returned ```sql
189 | SELECT TOP 5 Location, SUM(ProductionVolume) AS TotalProductionVolume
190 | FROM ExplorationProduction
191 | GROUP BY Location
192 | ORDER BY TotalProductionVolume DESC;
193 | ```
194 | tool plugins-query needs to be called with parameters {"query":"SELECT TOP 5 Location, SUM(ProductionVolume) AS TotalProductionVolume FROM ExplorationProduction GROUP BY Location ORDER BY TotalProductionVolume DESC;"}
195 | tool plugins-query called and returned ('West Travishaven, Vietnam', Decimal('999300.73')),('Baileyville, Israel', Decimal('998248.91')),('Williamsborough, Wallis and Futuna', Decimal('997729.20')),('Lake Gabrielshire, Panama', Decimal('996433.80')),('Davidstad, Saint Kitts and Nevis', Decimal('994778.98'))
196 | Assistant > Here are the five locations with the highest production volumes:
197 |
198 | 1. **West Travishaven, Vietnam**: 999,300.73
199 | 2. **Baileyville, Israel**: 998,248.91
200 | 3. **Williamsborough, Wallis and Futuna**: 997,729.20
201 | 4. **Lake Gabrielshire, Panama**: 996,433.80
202 | 5. **Davidstad, Saint Kitts and Nevis**: 994,778.98
203 | Listening:
204 | User > No.
205 | ````
206 |
207 | ## Customize the Solution
208 |
209 | ### Add More Plugins
210 | You can add more plugins by:
211 | 1. Creating a new Python file in the `src/plugins` directory
212 | 2. Implementing your plugin as a class (more details in [Plugins](https://learn.microsoft.com/en-us/semantic-kernel/concepts/plugins/?pivots=programming-language-python))
213 |
214 | ### Reusing the Kernel
215 | If you want to reuse this logic in another project, it is really easy. You just need to reuse the `src/kernel` package in your project, passing the required parameters.
216 |
217 | ## How to Contribute
218 |
219 | This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
220 |
221 | When you submit a pull request, a CLA bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.
222 |
223 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
224 |
225 | ## Key Contacts & Contributors
226 |
227 | Highlight the main contacts for the project and acknowledge contributors. You can adapt the structure from AI-in-a-Box:
228 |
229 | | Contact | GitHub ID | Email |
230 | |--------------------|---------------------|--------------------------|
231 | | Franklin Guimaraes | @franklinlindemberg | fguimaraes@microsoft.com |
232 |
233 |
234 | ## License
235 |
236 | This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party's policies.
237 |
238 | ## FAQ
239 |
240 | 1. ```commandline
241 | Server is not found or not accessible. Check if instance name is correct and if SQL Server is configured to allow remote connections. For more information see SQL Server Books Online. (11001)')
242 | ```
243 | This error is due to the fact that the SQL Server is not accessible from the machine where the application is running. Check [Run Pre-requisites](#run-pre-requisites) for more details.
244 |
245 |
246 | ---
247 |
248 | This project is part of the AI-in-a-Box series, aimed at providing the technical community with tools and accelerators to implement AI/ML solutions efficiently and effectively.
249 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Security
4 |
5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
6 |
7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
8 |
9 | ## Reporting Security Issues
10 |
11 | **Please do not report security vulnerabilities through public GitHub issues.**
12 |
13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
14 |
15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/msrc/pgp-key-msrc).
16 |
17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
18 |
19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
20 |
21 | - Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
22 | - Full paths of source file(s) related to the manifestation of the issue
23 | - The location of the affected source code (tag/branch/commit or direct URL)
24 | - Any special configuration required to reproduce the issue
25 | - Step-by-step instructions to reproduce the issue
26 | - Proof-of-concept or exploit code (if possible)
27 | - Impact of the issue, including how an attacker might exploit the issue
28 |
29 | This information will help us triage your report more quickly.
30 |
31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
32 |
33 | ## Preferred Languages
34 |
35 | We prefer all communications to be in English.
36 |
37 | ## Policy
38 |
39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/msrc/cvd).
40 |
41 |
42 |
--------------------------------------------------------------------------------
/architecture/architecture.py:
--------------------------------------------------------------------------------
1 | from diagrams import Diagram, Cluster, Edge
2 | from diagrams.azure.database import SQLServers
3 | from diagrams.azure.ml import CognitiveServices
4 | from diagrams.programming.language import Python
5 | from diagrams.gcp.ml import SpeechToText, TextToSpeech
6 | from diagrams.azure.general import Usericon
7 | from diagrams.elastic.beats import Filebeat
8 |
# Render the solution architecture diagram as a PNG in the working directory.
with Diagram("NLP to SQL Architecture", show=False):
    # Stand-alone Azure services the solution talks to.
    speech_to_text = SpeechToText("Speech to Text")
    text_to_speech = TextToSpeech("Text to Speech")
    server = SQLServers("SQL Server")
    openai = CognitiveServices("OpenAI")

    with Cluster("Orchestrator"):
        orchestrator = Python("Orchestrator")
        with Cluster("Semantic Kernel"):
            kernel = Python("Semantic Kernel")
            chat = Filebeat("Chat Completion")

            with Cluster("Plugins"):
                db_plugin = Python("Query DB")
                sql_plugin = Filebeat("NLP to SQL")

            # Kernel wiring: the chat-completion service and the two plugins,
            # which in turn reach the SQL Server and OpenAI backends.
            kernel >> Edge(label="interact with chat") << chat
            kernel >> Edge(label="use translated SQL on the database") << db_plugin
            kernel >> Edge(label="translate query to SQL") << sql_plugin
            db_plugin >> Edge() << server
            chat >> Edge() << openai
            sql_plugin >> Edge() << openai

        # The orchestrator drives speech in/out and the kernel.
        orchestrator >> Edge() << speech_to_text
        orchestrator >> Edge() << text_to_speech
        orchestrator >> Edge() << kernel

    # The end user talks to the orchestrator by voice.
    Usericon() >> Edge(label="voice request/response") << orchestrator
--------------------------------------------------------------------------------
/architecture/nlp_to_sql_architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/nlp-sql-in-a-box/b59b6b02fd5e468a51a90c3da4c7a24c483c4d6f/architecture/nlp_to_sql_architecture.png
--------------------------------------------------------------------------------
/azure.yaml:
--------------------------------------------------------------------------------
# yaml-language-server: $schema=https://raw.githubusercontent.com/Azure/azure-dev/main/schemas/v1.0/azure.yaml.json

# Azure Developer CLI (azd) project definition.
name: nlpsql-in-a-box
hooks:
  # Runs automatically after `azd provision` completes.
  postprovision:
    # updates the .env file with the values from the azd environment
    # (note: `>` overwrites any existing .env in the working directory)
    windows:
      shell: pwsh
      run: azd env get-values > .env
    posix:
      shell: sh
      run: azd env get-values > .env
13 |
--------------------------------------------------------------------------------
/infra/abbreviations.json:
--------------------------------------------------------------------------------
1 | {
2 | "analysisServicesServers": "as",
3 | "apiManagementService": "apim-",
4 | "appConfigurationConfigurationStores": "appcs-",
5 | "appManagedEnvironments": "cae-",
6 | "appContainerApps": "ca-",
7 | "authorizationPolicyDefinitions": "policy-",
8 | "automationAutomationAccounts": "aa-",
9 | "blueprintBlueprints": "bp-",
10 | "blueprintBlueprintsArtifacts": "bpa-",
11 | "cacheRedis": "redis-",
12 | "cdnProfiles": "cdnp-",
13 | "cdnProfilesEndpoints": "cdne-",
14 | "cognitiveServicesAccounts": "cog-",
15 | "cognitiveServicesBing": "cog-bg",
16 | "cognitiveServicesOpenAI": "cog-oa-",
17 | "cognitiveServicesFormRecognizer": "cog-fr-",
18 | "cognitiveServicesSpeech": "cog-sp-",
19 | "cognitiveServicesTextAnalytics": "cog-ta-",
20 | "cognitiveServicesBot": "cog-bot-",
21 | "computeAvailabilitySets": "avail-",
22 | "computeCloudServices": "cld-",
23 | "computeDiskEncryptionSets": "des",
24 | "computeDisks": "disk",
25 | "computeDisksOs": "osdisk",
26 | "computeGalleries": "gal",
27 | "computeSnapshots": "snap-",
28 | "computeVirtualMachines": "vm",
29 | "computeVirtualMachineScaleSets": "vmss-",
30 | "containerInstanceContainerGroups": "ci",
31 | "containerRegistryRegistries": "cr",
32 | "containerServiceManagedClusters": "aks-",
33 | "databricksWorkspaces": "dbw-",
34 | "dataFactoryFactories": "adf-",
35 | "dataLakeAnalyticsAccounts": "dla",
36 | "dataLakeStoreAccounts": "dls",
37 | "dataMigrationServices": "dms-",
38 | "dBforMySQLServers": "mysql-",
39 | "dBforPostgreSQLServers": "psql-",
40 | "devicesIotHubs": "iot-",
41 | "devicesProvisioningServices": "provs-",
42 | "devicesProvisioningServicesCertificates": "pcert-",
43 | "documentDBDatabaseAccounts": "cosmos-",
44 | "eventGridDomains": "evgd-",
45 | "eventGridDomainsTopics": "evgt-",
46 | "eventGridEventSubscriptions": "evgs-",
47 | "eventHubNamespaces": "evhns-",
48 | "eventHubNamespacesEventHubs": "evh-",
49 | "hdInsightClustersHadoop": "hadoop-",
50 | "hdInsightClustersHbase": "hbase-",
51 | "hdInsightClustersKafka": "kafka-",
52 | "hdInsightClustersMl": "mls-",
53 | "hdInsightClustersSpark": "spark-",
54 | "hdInsightClustersStorm": "storm-",
55 | "hybridComputeMachines": "arcs-",
56 | "insightsActionGroups": "ag-",
57 | "insightsComponents": "appi-",
58 | "iotHubComponents": "iot-",
59 | "keyVaultVaults": "kv-",
60 | "kubernetesConnectedClusters": "arck",
61 | "kustoClusters": "dec",
62 | "kustoClustersDatabases": "dedb",
63 | "logicIntegrationAccounts": "ia-",
64 | "logicWorkflows": "logic-",
65 | "machineLearningServicesWorkspaces": "mlw-",
66 | "machineLearningServicesComputeCPU": "mlcpu",
67 | "machineLearningServicesComputeGPU": "mlgpu-",
68 | "machineLearningServicesCluster": "mlclus-",
69 | "managedIdentityUserAssignedIdentities": "id-",
70 | "managementManagementGroups": "mg-",
71 | "migrateAssessmentProjects": "migr-",
72 | "networkApplicationGateways": "agw-",
73 | "networkApplicationSecurityGroups": "asg-",
74 | "networkAzureFirewalls": "afw-",
75 | "networkBastionHosts": "bas-",
76 | "networkConnections": "con-",
77 | "networkDnsZones": "dnsz-",
78 | "networkExpressRouteCircuits": "erc-",
79 | "networkFirewallPolicies": "afwp-",
80 | "networkFirewallPoliciesWebApplication": "waf",
81 | "networkFirewallPoliciesRuleGroups": "wafrg",
82 | "networkFrontDoors": "fd-",
83 | "networkFrontdoorWebApplicationFirewallPolicies": "fdfp-",
84 | "networkLoadBalancersExternal": "lbe-",
85 | "networkLoadBalancersInternal": "lbi-",
86 | "networkLoadBalancersInboundNatRules": "rule-",
87 | "networkLocalNetworkGateways": "lgw-",
88 | "networkNatGateways": "ng-",
89 | "networkNetworkInterfaces": "nic-",
90 | "networkNetworkSecurityGroups": "nsg-",
91 | "networkNetworkSecurityGroupsSecurityRules": "nsgsr-",
92 | "networkNetworkWatchers": "nw-",
93 | "networkPrivateDnsZones": "pdnsz-",
94 | "networkPrivateLinkServices": "pl-",
95 | "networkPublicIPAddresses": "pip-",
96 | "networkPublicIPPrefixes": "ippre-",
97 | "networkRouteFilters": "rf-",
98 | "networkRouteTables": "rt-",
99 | "networkRouteTablesRoutes": "udr-",
100 | "networkTrafficManagerProfiles": "traf-",
101 | "networkVirtualNetworkGateways": "vgw-",
102 | "networkVirtualNetworks": "vnet-",
103 | "networkVirtualNetworksSubnets": "snet-",
104 | "networkVirtualNetworksVirtualNetworkPeerings": "peer-",
105 | "networkVirtualWans": "vwan-",
106 | "networkVpnGateways": "vpng-",
107 | "networkVpnGatewaysVpnConnections": "vcn-",
108 | "networkVpnGatewaysVpnSites": "vst-",
109 | "notificationHubsNamespaces": "ntfns-",
110 | "notificationHubsNamespacesNotificationHubs": "ntf-",
111 | "operationalInsightsWorkspaces": "log-",
112 | "portalDashboards": "dash-",
113 | "powerBIDedicatedCapacities": "pbi-",
114 | "purviewAccounts": "pview-",
115 | "recoveryServicesVaults": "rsv-",
116 | "resourcesResourceGroups": "rg-",
117 | "searchSearchServices": "srch-",
118 | "serviceBusNamespaces": "sb-",
119 | "serviceBusNamespacesQueues": "sbq-",
120 | "serviceBusNamespacesTopics": "sbt-",
121 | "serviceEndPointPolicies": "se-",
122 | "serviceFabricClusters": "sf-",
123 | "signalRServiceSignalR": "sigr",
124 | "sqlManagedInstances": "sqlmi-",
125 | "sqlServers": "sql-",
126 | "sqlServersDataWarehouse": "sqldw-",
127 | "sqlServersDatabases": "sqldb-",
128 | "sqlServersDatabasesStretch": "sqlstrdb-",
129 | "storageStorageAccounts": "st",
130 | "storageStorageAccountsVm": "stvm",
131 | "storSimpleManagers": "ssimp",
132 | "streamAnalyticsCluster": "asa-",
133 | "synapseWorkspaces": "syn",
134 | "synapseWorkspacesAnalyticsWorkspaces": "synw",
135 | "synapseWorkspacesSqlPoolsDedicated": "syndp",
136 | "synapseWorkspacesSqlPoolsSpark": "synsp",
137 | "timeSeriesInsightsEnvironments": "tsi-",
138 | "webServerFarms": "plan-",
139 | "webSitesAppService": "app-",
140 | "webSitesAppServiceEnvironment": "ase-",
141 | "webSitesFunctions": "func-",
142 | "webStaticSites": "stapp-"
143 | }
144 |
--------------------------------------------------------------------------------
/infra/azuredeploy.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://schema.management.azure.com/schemas/2018-05-01/subscriptionDeploymentTemplate.json#",
3 | "contentVersion": "1.0.0.0",
4 | "metadata": {
5 | "_generator": {
6 | "name": "bicep",
7 | "version": "0.28.1.47646",
8 | "templateHash": "4015609571791161378"
9 | }
10 | },
11 | "parameters": {
12 | "environmentName": {
13 | "type": "string",
14 | "minLength": 1,
15 | "maxLength": 64,
16 | "metadata": {
        "description": "Name of the environment which is used to generate a short unique hash used in all resources."
18 | }
19 | },
20 | "resourceGroupName": {
21 | "type": "string",
22 | "defaultValue": "",
23 | "metadata": {
24 | "description": "Resource group name"
25 | }
26 | },
27 | "principalId": {
28 | "type": "string",
29 | "metadata": {
30 | "description": "User's principal id"
31 | }
32 | },
33 | "tags": {
34 | "type": "object",
35 | "defaultValue": {},
36 | "metadata": {
37 | "description": "Tags to be used for all resources"
38 | }
39 | },
40 | "openaiName": {
41 | "type": "string",
42 | "defaultValue": "",
43 | "metadata": {
44 | "description": "OpenAI resource name"
45 | }
46 | },
47 | "sqlServerName": {
48 | "type": "string",
49 | "defaultValue": "",
50 | "metadata": {
51 | "description": "SQL Server resource name"
52 | }
53 | },
54 | "sqlDatabaseName": {
55 | "type": "string",
56 | "defaultValue": "",
57 | "metadata": {
58 | "description": "Database name"
59 | }
60 | },
61 | "administratorLogin": {
62 | "type": "string",
63 | "metadata": {
64 | "description": "Set the administrator login for the SQL Server"
65 | }
66 | },
67 | "ipAddress": {
68 | "type": "string",
69 | "defaultValue": "",
70 | "metadata": {
71 | "description": "IP address to allow for SQL Server connection"
72 | }
73 | },
74 | "speechServiceName": {
75 | "type": "string",
76 | "defaultValue": "",
77 | "metadata": {
78 | "description": "Speech service resource name"
79 | }
80 | }
81 | },
82 | "variables": {
83 | "$fxv#0": {
84 | "analysisServicesServers": "as",
85 | "apiManagementService": "apim-",
86 | "appConfigurationConfigurationStores": "appcs-",
87 | "appManagedEnvironments": "cae-",
88 | "appContainerApps": "ca-",
89 | "authorizationPolicyDefinitions": "policy-",
90 | "automationAutomationAccounts": "aa-",
91 | "blueprintBlueprints": "bp-",
92 | "blueprintBlueprintsArtifacts": "bpa-",
93 | "cacheRedis": "redis-",
94 | "cdnProfiles": "cdnp-",
95 | "cdnProfilesEndpoints": "cdne-",
96 | "cognitiveServicesAccounts": "cog-",
97 | "cognitiveServicesBing": "cog-bg",
98 | "cognitiveServicesOpenAI": "cog-oa-",
99 | "cognitiveServicesFormRecognizer": "cog-fr-",
100 | "cognitiveServicesSpeech": "cog-sp-",
101 | "cognitiveServicesTextAnalytics": "cog-ta-",
102 | "cognitiveServicesBot": "cog-bot-",
103 | "computeAvailabilitySets": "avail-",
104 | "computeCloudServices": "cld-",
105 | "computeDiskEncryptionSets": "des",
106 | "computeDisks": "disk",
107 | "computeDisksOs": "osdisk",
108 | "computeGalleries": "gal",
109 | "computeSnapshots": "snap-",
110 | "computeVirtualMachines": "vm",
111 | "computeVirtualMachineScaleSets": "vmss-",
112 | "containerInstanceContainerGroups": "ci",
113 | "containerRegistryRegistries": "cr",
114 | "containerServiceManagedClusters": "aks-",
115 | "databricksWorkspaces": "dbw-",
116 | "dataFactoryFactories": "adf-",
117 | "dataLakeAnalyticsAccounts": "dla",
118 | "dataLakeStoreAccounts": "dls",
119 | "dataMigrationServices": "dms-",
120 | "dBforMySQLServers": "mysql-",
121 | "dBforPostgreSQLServers": "psql-",
122 | "devicesIotHubs": "iot-",
123 | "devicesProvisioningServices": "provs-",
124 | "devicesProvisioningServicesCertificates": "pcert-",
125 | "documentDBDatabaseAccounts": "cosmos-",
126 | "eventGridDomains": "evgd-",
127 | "eventGridDomainsTopics": "evgt-",
128 | "eventGridEventSubscriptions": "evgs-",
129 | "eventHubNamespaces": "evhns-",
130 | "eventHubNamespacesEventHubs": "evh-",
131 | "hdInsightClustersHadoop": "hadoop-",
132 | "hdInsightClustersHbase": "hbase-",
133 | "hdInsightClustersKafka": "kafka-",
134 | "hdInsightClustersMl": "mls-",
135 | "hdInsightClustersSpark": "spark-",
136 | "hdInsightClustersStorm": "storm-",
137 | "hybridComputeMachines": "arcs-",
138 | "insightsActionGroups": "ag-",
139 | "insightsComponents": "appi-",
140 | "iotHubComponents": "iot-",
141 | "keyVaultVaults": "kv-",
142 | "kubernetesConnectedClusters": "arck",
143 | "kustoClusters": "dec",
144 | "kustoClustersDatabases": "dedb",
145 | "logicIntegrationAccounts": "ia-",
146 | "logicWorkflows": "logic-",
147 | "machineLearningServicesWorkspaces": "mlw-",
148 | "machineLearningServicesComputeCPU": "mlcpu",
149 | "machineLearningServicesComputeGPU": "mlgpu-",
150 | "machineLearningServicesCluster": "mlclus-",
151 | "managedIdentityUserAssignedIdentities": "id-",
152 | "managementManagementGroups": "mg-",
153 | "migrateAssessmentProjects": "migr-",
154 | "networkApplicationGateways": "agw-",
155 | "networkApplicationSecurityGroups": "asg-",
156 | "networkAzureFirewalls": "afw-",
157 | "networkBastionHosts": "bas-",
158 | "networkConnections": "con-",
159 | "networkDnsZones": "dnsz-",
160 | "networkExpressRouteCircuits": "erc-",
161 | "networkFirewallPolicies": "afwp-",
162 | "networkFirewallPoliciesWebApplication": "waf",
163 | "networkFirewallPoliciesRuleGroups": "wafrg",
164 | "networkFrontDoors": "fd-",
165 | "networkFrontdoorWebApplicationFirewallPolicies": "fdfp-",
166 | "networkLoadBalancersExternal": "lbe-",
167 | "networkLoadBalancersInternal": "lbi-",
168 | "networkLoadBalancersInboundNatRules": "rule-",
169 | "networkLocalNetworkGateways": "lgw-",
170 | "networkNatGateways": "ng-",
171 | "networkNetworkInterfaces": "nic-",
172 | "networkNetworkSecurityGroups": "nsg-",
173 | "networkNetworkSecurityGroupsSecurityRules": "nsgsr-",
174 | "networkNetworkWatchers": "nw-",
175 | "networkPrivateDnsZones": "pdnsz-",
176 | "networkPrivateLinkServices": "pl-",
177 | "networkPublicIPAddresses": "pip-",
178 | "networkPublicIPPrefixes": "ippre-",
179 | "networkRouteFilters": "rf-",
180 | "networkRouteTables": "rt-",
181 | "networkRouteTablesRoutes": "udr-",
182 | "networkTrafficManagerProfiles": "traf-",
183 | "networkVirtualNetworkGateways": "vgw-",
184 | "networkVirtualNetworks": "vnet-",
185 | "networkVirtualNetworksSubnets": "snet-",
186 | "networkVirtualNetworksVirtualNetworkPeerings": "peer-",
187 | "networkVirtualWans": "vwan-",
188 | "networkVpnGateways": "vpng-",
189 | "networkVpnGatewaysVpnConnections": "vcn-",
190 | "networkVpnGatewaysVpnSites": "vst-",
191 | "notificationHubsNamespaces": "ntfns-",
192 | "notificationHubsNamespacesNotificationHubs": "ntf-",
193 | "operationalInsightsWorkspaces": "log-",
194 | "portalDashboards": "dash-",
195 | "powerBIDedicatedCapacities": "pbi-",
196 | "purviewAccounts": "pview-",
197 | "recoveryServicesVaults": "rsv-",
198 | "resourcesResourceGroups": "rg-",
199 | "searchSearchServices": "srch-",
200 | "serviceBusNamespaces": "sb-",
201 | "serviceBusNamespacesQueues": "sbq-",
202 | "serviceBusNamespacesTopics": "sbt-",
203 | "serviceEndPointPolicies": "se-",
204 | "serviceFabricClusters": "sf-",
205 | "signalRServiceSignalR": "sigr",
206 | "sqlManagedInstances": "sqlmi-",
207 | "sqlServers": "sql-",
208 | "sqlServersDataWarehouse": "sqldw-",
209 | "sqlServersDatabases": "sqldb-",
210 | "sqlServersDatabasesStretch": "sqlstrdb-",
211 | "storageStorageAccounts": "st",
212 | "storageStorageAccountsVm": "stvm",
213 | "storSimpleManagers": "ssimp",
214 | "streamAnalyticsCluster": "asa-",
215 | "synapseWorkspaces": "syn",
216 | "synapseWorkspacesAnalyticsWorkspaces": "synw",
217 | "synapseWorkspacesSqlPoolsDedicated": "syndp",
218 | "synapseWorkspacesSqlPoolsSpark": "synsp",
219 | "timeSeriesInsightsEnvironments": "tsi-",
220 | "webServerFarms": "plan-",
221 | "webSitesAppService": "app-",
222 | "webSitesAppServiceEnvironment": "ase-",
223 | "webSitesFunctions": "func-",
224 | "webStaticSites": "stapp-"
225 | },
226 | "location": "[deployment().location]",
227 | "abbrs": "[variables('$fxv#0')]",
228 | "uniqueSuffix": "[substring(uniqueString(subscription().id, parameters('environmentName')), 1, 5)]",
229 | "names": {
230 | "resourceGroupName": "[if(not(empty(parameters('resourceGroupName'))), parameters('resourceGroupName'), format('{0}{1}', variables('abbrs').resourcesResourceGroups, parameters('environmentName')))]",
231 | "openaiName": "[if(not(empty(parameters('openaiName'))), parameters('openaiName'), format('{0}{1}-{2}', variables('abbrs').cognitiveServicesOpenAI, parameters('environmentName'), variables('uniqueSuffix')))]",
232 | "speechServiceName": "[if(not(empty(parameters('speechServiceName'))), parameters('speechServiceName'), format('{0}{1}-{2}', variables('abbrs').cognitiveServicesSpeech, parameters('environmentName'), variables('uniqueSuffix')))]",
233 | "sqlServerName": "[if(not(empty(parameters('sqlServerName'))), parameters('sqlServerName'), format('{0}{1}-{2}', variables('abbrs').sqlServers, parameters('environmentName'), variables('uniqueSuffix')))]",
234 | "sqlDatabaseName": "[if(not(empty(parameters('sqlDatabaseName'))), parameters('sqlDatabaseName'), format('{0}{1}-{2}', variables('abbrs').sqlServersDatabases, parameters('environmentName'), variables('uniqueSuffix')))]"
235 | }
236 | },
237 | "resources": [
238 | {
239 | "type": "Microsoft.Resources/resourceGroups",
240 | "apiVersion": "2023-07-01",
241 | "name": "[variables('names').resourceGroupName]",
242 | "location": "[variables('location')]",
243 | "tags": "[parameters('tags')]"
244 | },
245 | {
246 | "type": "Microsoft.Resources/deployments",
247 | "apiVersion": "2022-09-01",
248 | "name": "deploy_openai",
249 | "resourceGroup": "[variables('names').resourceGroupName]",
250 | "properties": {
251 | "expressionEvaluationOptions": {
252 | "scope": "inner"
253 | },
254 | "mode": "Incremental",
255 | "parameters": {
256 | "location": {
257 | "value": "[variables('location')]"
258 | },
259 | "principalId": {
260 | "value": "[parameters('principalId')]"
261 | },
262 | "ipAddress": {
263 | "value": "[parameters('ipAddress')]"
264 | },
265 | "openaiName": {
266 | "value": "[variables('names').openaiName]"
267 | },
268 | "tags": {
269 | "value": "[parameters('tags')]"
270 | }
271 | },
272 | "template": {
273 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
274 | "contentVersion": "1.0.0.0",
275 | "metadata": {
276 | "_generator": {
277 | "name": "bicep",
278 | "version": "0.28.1.47646",
279 | "templateHash": "8688028833357356624"
280 | }
281 | },
282 | "parameters": {
283 | "location": {
284 | "type": "string"
285 | },
286 | "principalId": {
287 | "type": "string"
288 | },
289 | "ipAddress": {
290 | "type": "string"
291 | },
292 | "openaiName": {
293 | "type": "string"
294 | },
295 | "tags": {
296 | "type": "object",
297 | "defaultValue": {}
298 | }
299 | },
300 | "resources": [
301 | {
302 | "type": "Microsoft.CognitiveServices/accounts",
303 | "apiVersion": "2023-05-01",
304 | "name": "[parameters('openaiName')]",
305 | "location": "[parameters('location')]",
306 | "tags": "[parameters('tags')]",
307 | "sku": {
308 | "name": "S0"
309 | },
310 | "kind": "OpenAI",
311 | "properties": {
312 | "customSubDomainName": "[parameters('openaiName')]",
313 | "apiProperties": {
314 | "statisticsEnabled": false
315 | },
316 | "disableLocalAuth": true,
317 | "publicNetworkAccess": "[if(not(equals(parameters('ipAddress'), '')), 'Enabled', 'Disabled')]",
318 | "networkAcls": "[if(not(equals(parameters('ipAddress'), '')), createObject('defaultAction', 'Deny', 'ipRules', createArray(createObject('value', parameters('ipAddress')))), null())]"
319 | }
320 | },
321 | {
322 | "type": "Microsoft.CognitiveServices/accounts/deployments",
323 | "apiVersion": "2023-05-01",
324 | "name": "[format('{0}/{1}', parameters('openaiName'), 'gpt-4o')]",
325 | "properties": {
326 | "model": {
327 | "format": "OpenAI",
328 | "name": "gpt-4o",
329 | "version": "2024-05-13"
330 | }
331 | },
332 | "sku": {
333 | "capacity": 10,
334 | "name": "Standard"
335 | },
336 | "dependsOn": [
337 | "[resourceId('Microsoft.CognitiveServices/accounts', parameters('openaiName'))]"
338 | ]
339 | },
340 | {
341 | "type": "Microsoft.Authorization/roleAssignments",
342 | "apiVersion": "2022-04-01",
343 | "name": "[guid(parameters('principalId'), resourceId('Microsoft.CognitiveServices/accounts', parameters('openaiName')), resourceId('Microsoft.Authorization/roleDefinitions', '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd'))]",
344 | "properties": {
345 | "description": "User role assignment for OpenAI Service",
346 | "principalId": "[parameters('principalId')]",
347 | "principalType": "User",
348 | "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd')]"
349 | },
350 | "dependsOn": [
351 | "[resourceId('Microsoft.CognitiveServices/accounts', parameters('openaiName'))]"
352 | ]
353 | }
354 | ],
355 | "outputs": {
356 | "endpoint": {
357 | "type": "string",
358 | "value": "[reference(resourceId('Microsoft.CognitiveServices/accounts', parameters('openaiName')), '2023-05-01').endpoint]"
359 | },
360 | "deploymentName": {
361 | "type": "string",
362 | "value": "gpt-4o"
363 | }
364 | }
365 | }
366 | },
367 | "dependsOn": [
368 | "[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('names').resourceGroupName)]"
369 | ]
370 | },
371 | {
372 | "type": "Microsoft.Resources/deployments",
373 | "apiVersion": "2022-09-01",
374 | "name": "deploy_speech",
375 | "resourceGroup": "[variables('names').resourceGroupName]",
376 | "properties": {
377 | "expressionEvaluationOptions": {
378 | "scope": "inner"
379 | },
380 | "mode": "Incremental",
381 | "parameters": {
382 | "location": {
383 | "value": "[variables('location')]"
384 | },
385 | "principalId": {
386 | "value": "[parameters('principalId')]"
387 | },
388 | "ipAddress": {
389 | "value": "[parameters('ipAddress')]"
390 | },
391 | "speechServiceName": {
392 | "value": "[variables('names').speechServiceName]"
393 | },
394 | "tags": {
395 | "value": "[parameters('tags')]"
396 | }
397 | },
398 | "template": {
399 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
400 | "contentVersion": "1.0.0.0",
401 | "metadata": {
402 | "_generator": {
403 | "name": "bicep",
404 | "version": "0.28.1.47646",
405 | "templateHash": "1243482995404149893"
406 | }
407 | },
408 | "parameters": {
409 | "location": {
410 | "type": "string"
411 | },
412 | "speechServiceName": {
413 | "type": "string"
414 | },
415 | "principalId": {
416 | "type": "string"
417 | },
418 | "ipAddress": {
419 | "type": "string"
420 | },
421 | "tags": {
422 | "type": "object",
423 | "defaultValue": {}
424 | }
425 | },
426 | "resources": [
427 | {
428 | "type": "Microsoft.CognitiveServices/accounts",
429 | "apiVersion": "2022-03-01",
430 | "name": "[parameters('speechServiceName')]",
431 | "location": "[parameters('location')]",
432 | "kind": "SpeechServices",
433 | "sku": {
434 | "name": "S0",
435 | "tier": "Standard"
436 | },
437 | "properties": {
438 | "customSubDomainName": "[parameters('speechServiceName')]",
439 | "disableLocalAuth": true,
440 | "publicNetworkAccess": "[if(not(equals(parameters('ipAddress'), '')), 'Enabled', 'Disabled')]",
441 | "networkAcls": "[if(not(equals(parameters('ipAddress'), '')), createObject('defaultAction', 'Deny', 'ipRules', createArray(createObject('value', parameters('ipAddress')))), null())]"
442 | },
443 | "tags": "[parameters('tags')]"
444 | },
445 | {
446 | "type": "Microsoft.Authorization/roleAssignments",
447 | "apiVersion": "2022-04-01",
448 | "name": "[guid(parameters('principalId'), resourceId('Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), resourceId('Microsoft.Authorization/roleDefinitions', 'f2dc8367-1007-4938-bd23-fe263f013447'))]",
449 | "properties": {
450 | "description": "User role assignment for Speech Service",
451 | "principalId": "[parameters('principalId')]",
452 | "principalType": "User",
453 | "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', 'f2dc8367-1007-4938-bd23-fe263f013447')]"
454 | },
455 | "dependsOn": [
456 | "[resourceId('Microsoft.CognitiveServices/accounts', parameters('speechServiceName'))]"
457 | ]
458 | }
459 | ],
460 | "outputs": {
461 | "id": {
462 | "type": "string",
463 | "value": "[resourceId('Microsoft.CognitiveServices/accounts', parameters('speechServiceName'))]"
464 | }
465 | }
466 | }
467 | },
468 | "dependsOn": [
469 | "[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('names').resourceGroupName)]"
470 | ]
471 | },
472 | {
473 | "type": "Microsoft.Resources/deployments",
474 | "apiVersion": "2022-09-01",
475 | "name": "deploy_sql",
476 | "resourceGroup": "[variables('names').resourceGroupName]",
477 | "properties": {
478 | "expressionEvaluationOptions": {
479 | "scope": "inner"
480 | },
481 | "mode": "Incremental",
482 | "parameters": {
483 | "location": {
484 | "value": "[variables('location')]"
485 | },
486 | "principalId": {
487 | "value": "[parameters('principalId')]"
488 | },
489 | "ipAddress": {
490 | "value": "[parameters('ipAddress')]"
491 | },
492 | "sqlServerName": {
493 | "value": "[variables('names').sqlServerName]"
494 | },
495 | "sqlDatabaseName": {
496 | "value": "[variables('names').sqlDatabaseName]"
497 | },
498 | "administratorLogin": {
499 | "value": "[parameters('administratorLogin')]"
500 | },
501 | "tags": {
502 | "value": "[parameters('tags')]"
503 | }
504 | },
505 | "template": {
506 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
507 | "contentVersion": "1.0.0.0",
508 | "metadata": {
509 | "_generator": {
510 | "name": "bicep",
511 | "version": "0.28.1.47646",
512 | "templateHash": "12306994777557947896"
513 | }
514 | },
515 | "parameters": {
516 | "location": {
517 | "type": "string"
518 | },
519 | "principalId": {
520 | "type": "string"
521 | },
522 | "sqlServerName": {
523 | "type": "string"
524 | },
525 | "sqlDatabaseName": {
526 | "type": "string"
527 | },
528 | "ipAddress": {
529 | "type": "string"
530 | },
531 | "tags": {
532 | "type": "object",
533 | "defaultValue": {}
534 | },
535 | "administratorLogin": {
536 | "type": "securestring",
537 | "metadata": {
538 | "description": "Set the administrator login for the SQL Server"
539 | }
540 | }
541 | },
542 | "resources": [
543 | {
544 | "type": "Microsoft.Sql/servers",
545 | "apiVersion": "2022-11-01-preview",
546 | "name": "[parameters('sqlServerName')]",
547 | "location": "[parameters('location')]",
548 | "properties": {
549 | "administrators": {
550 | "login": "[parameters('administratorLogin')]",
551 | "principalType": "User",
552 | "azureADOnlyAuthentication": true,
553 | "sid": "[parameters('principalId')]",
554 | "tenantId": "[subscription().tenantId]"
555 | },
556 | "publicNetworkAccess": "[if(not(equals(parameters('ipAddress'), '')), 'Enabled', 'Disabled')]"
557 | },
558 | "tags": "[parameters('tags')]"
559 | },
560 | {
561 | "type": "Microsoft.Sql/servers/databases",
562 | "apiVersion": "2022-11-01-preview",
563 | "name": "[format('{0}/{1}', parameters('sqlServerName'), parameters('sqlDatabaseName'))]",
564 | "location": "[parameters('location')]",
565 | "tags": "[parameters('tags')]",
566 | "dependsOn": [
567 | "[resourceId('Microsoft.Sql/servers', parameters('sqlServerName'))]"
568 | ]
569 | },
570 | {
571 | "condition": "[not(equals(parameters('ipAddress'), ''))]",
572 | "type": "Microsoft.Sql/servers/firewallRules",
573 | "apiVersion": "2020-11-01-preview",
574 | "name": "[format('{0}/{1}', parameters('sqlServerName'), 'AllowLocalConnection')]",
575 | "properties": {
576 | "startIpAddress": "[parameters('ipAddress')]",
577 | "endIpAddress": "[parameters('ipAddress')]"
578 | },
579 | "dependsOn": [
580 | "[resourceId('Microsoft.Sql/servers', parameters('sqlServerName'))]"
581 | ]
582 | }
583 | ],
584 | "outputs": {
585 | "serverName": {
586 | "type": "string",
587 | "value": "[parameters('sqlServerName')]"
588 | },
589 | "databaseName": {
590 | "type": "string",
591 | "value": "[parameters('sqlDatabaseName')]"
592 | }
593 | }
594 | }
595 | },
596 | "dependsOn": [
597 | "[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('names').resourceGroupName)]"
598 | ]
599 | }
600 | ],
601 | "outputs": {
602 | "SQL_SERVER_NAME": {
603 | "type": "string",
604 | "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('names').resourceGroupName), 'Microsoft.Resources/deployments', 'deploy_sql'), '2022-09-01').outputs.serverName.value]"
605 | },
606 | "SQL_DATABASE_NAME": {
607 | "type": "string",
608 | "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('names').resourceGroupName), 'Microsoft.Resources/deployments', 'deploy_sql'), '2022-09-01').outputs.databaseName.value]"
609 | },
610 | "SPEECH_SERVICE_ID": {
611 | "type": "string",
612 | "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('names').resourceGroupName), 'Microsoft.Resources/deployments', 'deploy_speech'), '2022-09-01').outputs.id.value]"
613 | },
614 | "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME": {
615 | "type": "string",
616 | "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('names').resourceGroupName), 'Microsoft.Resources/deployments', 'deploy_openai'), '2022-09-01').outputs.deploymentName.value]"
617 | },
618 | "AZURE_OPENAI_ENDPOINT": {
619 | "type": "string",
620 | "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('names').resourceGroupName), 'Microsoft.Resources/deployments', 'deploy_openai'), '2022-09-01').outputs.endpoint.value]"
621 | }
622 | }
623 | }
--------------------------------------------------------------------------------
/infra/main.bicep:
--------------------------------------------------------------------------------
targetScope = 'subscription'

@minLength(1)
@maxLength(64)
@description('Name of the environment which is used to generate a short unique hash used in all resources.')
param environmentName string

@description('Resource group name')
param resourceGroupName string = ''

@description('User\'s principal id')
param principalId string

@description('Tags to be used for all resources')
param tags object = {}

//OpenAI Module Parameters
@description('OpenAI resource name')
param openaiName string = ''

//SQL Module Parameters
@description('SQL Server resource name')
param sqlServerName string = ''
@description('Database name')
param sqlDatabaseName string = ''
@description('Set the administrator login for the SQL Server')
param administratorLogin string
@description('IP address to allow for SQL Server connection')
param ipAddress string = ''

//Speech Module Parameters
@description('Speech service resource name')
param speechServiceName string = ''

// Deployment-wide helpers: location of the subscription deployment and the
// shared abbreviation prefixes used to build conventional resource names.
var location = deployment().location
var abbrs = loadJsonContent('abbreviations.json')
var uniqueSuffix = substring(uniqueString(subscription().id, environmentName), 1, 5)

// Resource names: use the explicit parameter when one was supplied, otherwise
// derive '<abbreviation><environmentName>-<uniqueSuffix>'.
var names = {
  resourceGroupName: !empty(resourceGroupName) ? resourceGroupName : '${abbrs.resourcesResourceGroups}${environmentName}'
  openaiName: !empty(openaiName) ? openaiName : '${abbrs.cognitiveServicesOpenAI}${environmentName}-${uniqueSuffix}'
  speechServiceName: !empty(speechServiceName) ? speechServiceName : '${abbrs.cognitiveServicesSpeech}${environmentName}-${uniqueSuffix}'
  sqlServerName: !empty(sqlServerName) ? sqlServerName : '${abbrs.sqlServers}${environmentName}-${uniqueSuffix}'
  sqlDatabaseName: !empty(sqlDatabaseName) ? sqlDatabaseName : '${abbrs.sqlServersDatabases}${environmentName}-${uniqueSuffix}'
}
46 |
47 |
48 | // 1. Create resource group
49 | resource resourceGroup 'Microsoft.Resources/resourceGroups@2023-07-01' = {
50 | name: names.resourceGroupName
51 | location: location
52 | tags: tags
53 | }
54 |
55 |
56 | // 2. Deploy OpenAI
57 | module m_openai 'modules/openai.bicep' = {
58 | name: 'deploy_openai'
59 | scope: resourceGroup
60 | params: {
61 | location: location
62 | principalId: principalId
63 | ipAddress: ipAddress
64 | openaiName: names.openaiName
65 | tags: tags
66 | }
67 | }
68 |
69 | // 3. Deploy Speech Service
70 | module m_speech 'modules/speech.bicep' = {
71 | name: 'deploy_speech'
72 | scope: resourceGroup
73 | params: {
74 | location: location
75 | principalId: principalId
76 | ipAddress: ipAddress
77 | speechServiceName: names.speechServiceName
78 | tags: tags
79 | }
80 | }
81 |
82 | //4. Deploy SQL Server and Database
83 | module m_sql 'modules/sql.bicep' = {
84 | name: 'deploy_sql'
85 | scope: resourceGroup
86 | params: {
87 | location: location
88 | principalId: principalId
89 | ipAddress: ipAddress
90 | sqlServerName: names.sqlServerName
91 | sqlDatabaseName: names.sqlDatabaseName
92 | administratorLogin: administratorLogin
93 | tags: tags
94 | }
95 | }
96 |
97 | output SQL_SERVER_NAME string = m_sql.outputs.serverName
98 | output SQL_DATABASE_NAME string = m_sql.outputs.databaseName
99 | output SPEECH_SERVICE_ID string = m_speech.outputs.id
100 | output AZURE_OPENAI_CHAT_DEPLOYMENT_NAME string = m_openai.outputs.deploymentName
101 | output AZURE_OPENAI_ENDPOINT string = m_openai.outputs.endpoint
102 |
--------------------------------------------------------------------------------
/infra/main.parameters.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
3 | "contentVersion": "1.0.0.0",
4 | "parameters": {
5 | "environmentName": {
6 | "value": "${AZURE_ENV_NAME}"
7 | },
8 | "location": {
9 | "value": "${AZURE_LOCATION}"
10 | },
11 | "principalId": {
12 | "value": "${AZURE_PRINCIPAL_ID}"
13 | },
14 | "ipAddress": {
15 | "value": "${IP_ADDRESS}"
16 | },
17 | "resourceGroupName": {
18 | "value": "${AZURE_RG_NAME}"
19 | },
20 | "gptModel": {
21 | "value": "${AZURE_GPT_MODEL}"
22 | },
23 | "gptVersion": {
24 | "value": "${AZURE_GPT_VERSION}"
25 | },
26 | "sqlServerName": {
27 | "value": "${AZURE_SQL_SERVER_NAME}"
28 | },
29 | "sqlDatabaseName": {
30 | "value": "${AZURE_SQL_DATABASE_NAME}"
31 | },
32 | "speechServiceName": {
33 | "value": "${AZURE_SPEECH_SERVICE_NAME}"
34 | },
35 | "tags": {
36 | "value": {
37 | "Owner": "AI Team",
38 | "Project": "EdgeAI",
39 | "Environment": "Dev",
40 | "Toolkit": "Bicep"
41 | }
42 | }
43 | }
44 | }
--------------------------------------------------------------------------------
/infra/modules/openai.bicep:
--------------------------------------------------------------------------------
//Declare Parameters--------------------------------------------------------------------------------------------------------------------------
param location string
param principalId string
param ipAddress string
param openaiName string
// Parameterized so the model can be changed without editing the module; the
// defaults preserve the previously hard-coded gpt-4o deployment.
@description('GPT model name to deploy')
param gptModel string = 'gpt-4o'
@description('GPT model version to deploy')
param gptVersion string = '2024-05-13'
param tags object = {}

// Azure OpenAI account: Entra ID auth only (API keys disabled); public network
// access is restricted to the supplied caller IP when one is given.
resource openaiService 'Microsoft.CognitiveServices/accounts@2023-05-01' = {
  name: openaiName
  location: location
  tags: tags
  sku: {
    name: 'S0'
  }
  kind: 'OpenAI'
  properties: {
    customSubDomainName: openaiName
    apiProperties: {
      statisticsEnabled: false
    }
    disableLocalAuth: true // do not support api key authentication
    publicNetworkAccess: (ipAddress != '') ? 'Enabled' : 'Disabled'
    networkAcls: (ipAddress != '') ? {
      defaultAction: 'Deny'
      ipRules: [
        {value: ipAddress}
      ]
    } : null
  }
}

// Model deployment; the deployment name matches the model name, as before.
resource gptDeployment 'Microsoft.CognitiveServices/accounts/deployments@2023-05-01' = {
  parent: openaiService
  name: gptModel
  properties: {
    model: {
      format: 'OpenAI'
      name: gptModel
      version: gptVersion
    }
  }
  sku: {
    capacity: 10
    name: 'Standard'
  }
}

resource openaiServiceUserRole 'Microsoft.Authorization/roleDefinitions@2022-04-01' existing = {
  name: '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd' // Cognitive Services OpenAI User
}

// Grant the deploying user the OpenAI User role so the app can call the
// endpoint with their Entra ID identity (no API key is available).
resource openaiServiceRBAC 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
  name: guid(principalId, openaiService.id, openaiServiceUserRole.id)
  properties: {
    description: 'User role assignment for OpenAI Service'
    principalId: principalId
    principalType: 'User'
    roleDefinitionId: openaiServiceUserRole.id
  }
}

output endpoint string = openaiService.properties.endpoint
output deploymentName string = gptDeployment.name
--------------------------------------------------------------------------------
/infra/modules/speech.bicep:
--------------------------------------------------------------------------------
param location string
param speechServiceName string
param principalId string
param ipAddress string
param tags object = {}

// Create Speech Service resource
// Local (API key) authentication is disabled, so clients must use Entra ID
// tokens. Public network access is enabled only when a caller IP address is
// supplied, and the network ACL then admits just that single IP.
resource speechService 'Microsoft.CognitiveServices/accounts@2022-03-01' = {
  name: speechServiceName
  location: location
  kind: 'SpeechServices'
  sku: {
    name: 'S0'
    tier: 'Standard'
  }
  properties: {
    customSubDomainName: speechServiceName // Set the custom subdomain name for the Speech Service
    disableLocalAuth: true // do not support api key authentication
    publicNetworkAccess: (ipAddress != '') ? 'Enabled' : 'Disabled'
    networkAcls: (ipAddress != '') ? {
      defaultAction: 'Deny'
      ipRules: [
        {value: ipAddress}
      ]
    } : null
  }
  tags: tags
}

// Built-in role definition: Cognitive Services Speech User.
resource speechServiceUserRole 'Microsoft.Authorization/roleDefinitions@2022-04-01' existing = {
  name: 'f2dc8367-1007-4938-bd23-fe263f013447' // Cognitive Services Speech User
}

// Grant the deploying user the Speech User role on this resource so the app
// can authenticate with their Entra ID identity instead of an API key.
resource speechServiceRBAC 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
  name: guid(principalId, speechService.id, speechServiceUserRole.id)
  properties: {
    description: 'User role assignment for Speech Service'
    principalId: principalId
    principalType: 'User'
    roleDefinitionId: speechServiceUserRole.id
  }
}

// Resource ID consumed by the app (SPEECH_SERVICE_ID) for token-based auth.
output id string = speechService.id
--------------------------------------------------------------------------------
/infra/modules/sql.bicep:
--------------------------------------------------------------------------------
param location string
param principalId string
param sqlServerName string
param sqlDatabaseName string
param ipAddress string
param tags object = {}

@description('Set the administrator login for the SQL Server')
@secure()
param administratorLogin string

// Create SQL Server resource
// SQL password authentication is disabled (azureADOnlyAuthentication); the
// deploying principal becomes the server's Entra ID administrator. Public
// network access is enabled only when a caller IP address is supplied.
resource sqlServer 'Microsoft.Sql/servers@2022-11-01-preview' = {
  name: sqlServerName
  location: location
  properties: {
    administrators: {
      login: administratorLogin
      principalType: 'User'
      azureADOnlyAuthentication: true // enforces Azure AD authentication
      sid: principalId
      tenantId: subscription().tenantId
    }
    publicNetworkAccess: (ipAddress != '') ? 'Enabled' : 'Disabled'
  }
  tags: tags
}

// Create SQL Database resource (no explicit SKU: service defaults apply)
resource sqlDatabase 'Microsoft.Sql/servers/databases@2022-11-01-preview' = {
  parent: sqlServer
  name: sqlDatabaseName
  location: location
  tags: tags
}


// Allow the caller's IP through the server firewall; the rule is only created
// when an IP address was provided.
resource sqlAllowLocalConnection 'Microsoft.Sql/servers/firewallRules@2020-11-01-preview' = if (ipAddress != '') {
  name: 'AllowLocalConnection'
  parent: sqlServer
  properties: {
    startIpAddress: ipAddress
    endIpAddress: ipAddress
  }
}

output serverName string = sqlServer.name
output databaseName string = sqlDatabase.name
--------------------------------------------------------------------------------
/media/ai-in-a-box.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/nlp-sql-in-a-box/b59b6b02fd5e468a51a90c3da4c7a24c483c4d6f/media/ai-in-a-box.png
--------------------------------------------------------------------------------
/media/banner-nlp-to-sql-in-a-box.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/nlp-sql-in-a-box/b59b6b02fd5e468a51a90c3da4c7a24c483c4d6f/media/banner-nlp-to-sql-in-a-box.png
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/nlp-sql-in-a-box/b59b6b02fd5e468a51a90c3da4c7a24c483c4d6f/src/__init__.py
--------------------------------------------------------------------------------
/src/app.py:
--------------------------------------------------------------------------------
1 | import os
2 | import asyncio
3 | import logging
4 |
5 | from dotenv import load_dotenv
6 | from azure.identity import DefaultAzureCredential
7 | from semantic_kernel.contents.chat_history import ChatHistory
8 |
9 |
10 | from .speech import Speech
11 | from .kernel import Kernel
12 | from .database import Database
13 | from .orchestrator import Orchestrator
14 |
15 |
# Log to a file so the console stays free for the speech-bot interaction;
# each record carries timestamp, logger name, line number and level.
logging.basicConfig(
    filename="app.log",
    format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    level=logging.INFO,
)
logger = logging.getLogger(__name__)
23 |
24 |
async def main():
    """Wire up all services from environment configuration and run the voice loop."""
    load_dotenv()

    # One Entra ID credential shared by every Azure service client.
    credential = DefaultAzureCredential()

    # Deployment outputs surfaced through the environment (.env / azd).
    sql_server = os.getenv("SQL_SERVER_NAME")
    sql_database = os.getenv("SQL_DATABASE_NAME")
    speech_resource_id = os.getenv("SPEECH_SERVICE_ID")
    region = os.getenv("AZURE_LOCATION")
    openai_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
    openai_deployment = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME")

    speech = Speech(credential=credential, resource_id=speech_resource_id, region=region)
    database = Database(server_name=sql_server, database_name=sql_database, credential=credential)

    # Ensure the demo table exists and is populated before the bot starts.
    database.setup()

    kernel = Kernel(
        database_service=database,
        credential=credential,
        openai_endpoint=openai_endpoint,
        openai_deployment_name=openai_deployment,
    )

    # Conversation state shared across all turns of the session.
    history = ChatHistory()

    orchestrator = Orchestrator(speech_service=speech, kernel=kernel)

    await orchestrator.run(chat_history=history)
52 |
# Entry point: start the async main loop.
if __name__ == "__main__":
    asyncio.run(main())
55 |
--------------------------------------------------------------------------------
/src/database/__init__.py:
--------------------------------------------------------------------------------
1 | from .service import Database
2 |
--------------------------------------------------------------------------------
/src/database/service.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import pyodbc, struct
4 | from azure.identity import DefaultAzureCredential
5 | from faker import Faker
6 |
7 | from .utils import table_exists, create_table, insert_record
8 |
9 |
logger = logging.getLogger(__name__)

# Entra ID token scope for Azure SQL access tokens.
scope = 'https://database.windows.net/.default'


# If you have issues connecting, make sure you have the correct driver installed
# ODBC Driver for SQL Server - https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server
# Base ODBC connection string; 1433 is the standard Azure SQL TCP port.
connection_string_template = 'DRIVER={driver};SERVER=tcp:{server_name}.database.windows.net,1433;DATABASE={database_name}'
driver = 'ODBC Driver 18 for SQL Server'
19 |
20 |
class Database:
    """Thin wrapper around a pyodbc connection to an Azure SQL database.

    Authenticates with an Entra ID access token obtained from the supplied
    credential; no SQL password is used.
    """

    def __init__(self, server_name: str, database_name: str, credential: DefaultAzureCredential) -> None:
        # NOTE(review): the token is fetched once at construction; a session
        # that outlives the token would need a refresh -- confirm lifetime.
        token = credential.get_token(scope).token

        self.conn = get_connection(server_name=server_name, database_name=database_name, token=token)

    def setup(self) -> None:
        """
        Set up the database by creating the ExplorationProduction table and
        inserting 1,000 fake records.

        Idempotent: does nothing when the table already exists.
        """
        logger.debug("Setting up the database.")
        # Create a cursor object to execute SQL queries
        cursor = self.conn.cursor()
        try:
            if table_exists(cursor):
                # Skip if table already exists.
                return

            logger.debug("Creating table.")
            create_table(cursor)

            # Faker generates plausible-looking demo data.
            fake = Faker()

            logger.debug("Generating and inserting records.")
            # Generate and insert 1,000 fake records
            for i in range(1000):
                insert_record(cursor, i, fake)

            # Commit all inserts as one transaction.
            self.conn.commit()
        finally:
            # Always release the cursor, including on early return or error.
            cursor.close()

        logger.debug("Database setup completed.")

    def query(self, query: str) -> list[pyodbc.Row]:
        """
        Execute the given SQL query and return the fetched rows.

        On any error the string "No Result Found" is returned instead; the
        LLM plugin surfaces that value to the model as-is.
        """
        cursor = self.conn.cursor()
        try:
            logger.debug("Querying database with: {}.".format(query))
            cursor.execute(query)
            result = cursor.fetchall()
            logger.debug("Successfully queried database: {}.".format(result))
        except Exception as ex:
            logger.error("Error querying database: {}.".format(ex))
            return "No Result Found"
        finally:
            cursor.close()

        return result
72 |
73 |
74 | def get_connection(server_name: str, database_name: str, token: str) -> pyodbc.Connection:
75 | # see https://learn.microsoft.com/en-us/azure/azure-sql/database/azure-sql-python-quickstart
76 | token_bytes = token.encode("UTF-16-LE")
77 | token_struct = struct.pack(f' int:
6 | """
7 | Check whether the ExplorationProduction table exists in the database.
8 | """
9 | query = '''
10 | IF (EXISTS (SELECT *
11 | FROM INFORMATION_SCHEMA.TABLES
12 | WHERE TABLE_NAME = 'ExplorationProduction'))
13 | SELECT 1 AS res
14 | ELSE SELECT 0 AS res;
15 | '''
16 |
17 | cursor.execute(query)
18 |
19 | return cursor.fetchone()[0] == 1
20 |
21 |
def create_table(cursor: pyodbc.Cursor) -> None:
    """
    Create the ExplorationProduction table through the given cursor.

    Assumes the table does not exist yet; callers guard with table_exists().
    """
    ddl = '''
    CREATE TABLE ExplorationProduction (
        WellID INT PRIMARY KEY,
        WellName VARCHAR(50),
        Location VARCHAR(100),
        ProductionDate DATE,
        ProductionVolume DECIMAL(10, 2),
        Operator VARCHAR(50),
        FieldName VARCHAR(50),
        Reservoir VARCHAR(50),
        Depth DECIMAL(10, 2),
        APIGravity DECIMAL(5, 2),
        WaterCut DECIMAL(5, 2),
        GasOilRatio DECIMAL(10, 2)
    );
    '''

    cursor.execute(ddl)
44 |
45 |
def insert_record(cursor: pyodbc.Cursor, i: int, fake: Faker) -> None:
    """
    Insert one synthetic row into the ExplorationProduction table.

    The primary key is derived from the loop index (i + 1); every other
    column is populated with a Faker-generated value.
    """
    query = '''
    INSERT INTO ExplorationProduction (WellID, WellName, Location, ProductionDate, ProductionVolume, Operator, FieldName, Reservoir, Depth, APIGravity, WaterCut, GasOilRatio)
    VALUES (?,?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    '''

    # Tuple elements are evaluated in order, preserving the Faker call sequence.
    params = (
        i + 1,                                                          # WellID
        fake.word() + ' Well',                                          # WellName
        fake.city() + ', ' + fake.country(),                            # Location
        fake.date_between(start_date='-1y', end_date='today'),          # ProductionDate
        fake.pydecimal(left_digits=6, right_digits=2, positive=True),   # ProductionVolume
        fake.company(),                                                 # Operator
        fake.word() + ' Field',                                         # FieldName
        fake.word() + ' Reservoir',                                     # Reservoir
        fake.pydecimal(left_digits=5, right_digits=2, positive=True),   # Depth
        fake.pydecimal(left_digits=2, right_digits=2, positive=True),   # APIGravity
        fake.pydecimal(left_digits=2, right_digits=2),                  # WaterCut
        fake.pydecimal(left_digits=4, right_digits=2),                  # GasOilRatio
    )

    # Insert the record into the ExplorationProduction table.
    cursor.execute(query, *params)
70 |
--------------------------------------------------------------------------------
/src/kernel/__init__.py:
--------------------------------------------------------------------------------
1 | from .service import Kernel
2 |
--------------------------------------------------------------------------------
/src/kernel/service.py:
--------------------------------------------------------------------------------
1 | import os
2 | import logging
3 |
4 | from azure.identity import DefaultAzureCredential
5 | from semantic_kernel import Kernel as SemanticKernel
6 | from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
7 | from semantic_kernel.functions import KernelArguments
8 | from semantic_kernel.contents.chat_history import ChatHistory
9 | from semantic_kernel.connectors.ai.function_call_behavior import FunctionCallBehavior
10 | from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import (
11 | AzureChatPromptExecutionSettings,
12 | )
13 | from semantic_kernel.contents.author_role import AuthorRole
14 | from semantic_kernel.contents.finish_reason import FinishReason
15 |
16 | from ..database import Database
17 |
18 |
19 | logger = logging.getLogger(__name__)
20 |
21 | # see https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/managed-identity
22 | scope = 'https://cognitiveservices.azure.com/.default'
23 |
24 |
class Kernel:
    """Semantic Kernel wrapper: Azure OpenAI chat completion plus the project plugins.

    Plugins under src/plugins (including DatabasePlugin) are loaded at
    construction time and may be auto-invoked as tools during a chat turn.
    """

    def __init__(self, database_service: Database, credential: DefaultAzureCredential, openai_endpoint: str, openai_deployment_name: str) -> None:
        # Create a new kernel
        self.kernel = SemanticKernel()
        # Create a chat completion service.
        # NOTE(review): the AAD token is fetched once here; it will eventually
        # expire for long-running sessions -- confirm refresh is not required.
        self.chat_completion = AzureChatCompletion(ad_token=credential.get_token(scope).token, endpoint=openai_endpoint, deployment_name=openai_deployment_name)

        # Add Azure OpenAI chat completion
        self.kernel.add_service(self.chat_completion)

        # Add plugins located under /plugins folder (relative to this file)
        parent_directory = os.path.join(__file__, "../../")
        # DatabasePlugin.__init__ receives the Database service via
        # class_init_arguments when the plugin is instantiated.
        init_args = {
            "DatabasePlugin": {
                "db": database_service
            }
        }
        self.kernel.add_plugin(parent_directory=parent_directory, plugin_name="plugins", class_init_arguments=init_args)

        # Enable automatic function calling so the model can invoke plugin
        # functions (e.g. the database query) without manual dispatch.
        self.execution_settings = AzureChatPromptExecutionSettings(tool_choice="auto")
        self.execution_settings.function_call_behavior = FunctionCallBehavior.EnableFunctions(auto_invoke=True, filters={})

    async def message(self, user_input: str, chat_history: ChatHistory) -> str:
        """
        Append the user message to the history, let the model (and any
        auto-invoked tools) respond, and return the assistant's reply text.
        """
        chat_history.add_user_message(user_input)
        # Remember the length so only messages added by this call are inspected.
        chat_history_count = len(chat_history)
        response = await self.chat_completion.get_chat_message_contents(
            chat_history=chat_history,
            settings=self.execution_settings,
            kernel=self.kernel,
            arguments=KernelArguments(),
        )

        # print assistant/tool actions appended during this exchange
        for message in chat_history[chat_history_count:]:
            if message.role == AuthorRole.TOOL:
                for item in message.items:
                    print("tool {} called and returned {}".format(item.name, item.result))
            elif message.role == AuthorRole.ASSISTANT and message.finish_reason == FinishReason.TOOL_CALLS:
                for item in message.items:
                    print("tool {} needs to be called with parameters {}".format(item.name, item.arguments))

        return str(response[0])
71 |
--------------------------------------------------------------------------------
/src/orchestrator/__init__.py:
--------------------------------------------------------------------------------
1 | from .service import Orchestrator
2 |
--------------------------------------------------------------------------------
/src/orchestrator/service.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from semantic_kernel.contents.chat_history import ChatHistory
4 |
5 | from ..speech import Speech
6 | from ..kernel import Kernel
7 |
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
class Orchestrator:
    """Runs the voice loop: listen, send to the kernel, speak the answer."""

    def __init__(self, speech_service: Speech, kernel: Kernel) -> None:
        self.speech_service = speech_service
        self.kernel = kernel

    @staticmethod
    def _normalize(text: str) -> str:
        # Speech recognition returns capitalized, punctuated text (the old code
        # compared against the literal 'Yes.'), so exact matches like "exit"
        # were unreliable; strip trailing punctuation and lowercase first.
        return text.strip().rstrip('.!?').lower()

    async def run(self, chat_history: ChatHistory) -> None:
        """
        Run the conversation loop until the user says "exit" or declines to
        continue. An error in a single turn is spoken and the loop continues.
        """
        self.speech_service.synthesize("....Welcome to the Kiosk Bot!! I am here to help you with your queries. I am still learning. So, please bear with me.")

        while True:
            try:
                self.speech_service.synthesize("Please ask your query through the Microphone:")
                print("Listening:")

                # Collect user input
                user_input = self.speech_service.recognize()
                print("User > " + user_input)

                # Terminate the loop if the user says "exit" (any casing/punctuation)
                if self._normalize(user_input) == "exit":
                    break

                response = await self.kernel.message(user_input=user_input, chat_history=chat_history)

                print("Assistant > " + response)
                self.speech_service.synthesize(response)

                self.speech_service.synthesize("Do you have any other query? Say Yes to Continue")

                # Ask whether to continue; anything other than "yes" ends the session.
                print("Listening:")
                user_input = self.speech_service.recognize()
                print("User > " + user_input)
                if self._normalize(user_input) != 'yes':
                    self.speech_service.synthesize("Thank you for using the Kiosk Bot. Have a nice day.")
                    break
            except Exception as e:
                logger.error("An exception occurred: {}".format(e))
                self.speech_service.synthesize("An error occurred. Let's try again.")
                continue
54 |
--------------------------------------------------------------------------------
/src/plugins/database_plugin.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Annotated, List
3 |
4 | import pyodbc
5 | from semantic_kernel.functions.kernel_function_decorator import kernel_function
6 |
7 | from src.database.service import Database
8 |
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
class DatabasePlugin:
    """DatabasePlugin provides a set of functions to access the database."""

    def __init__(self, db: Database) -> None:
        # Injected by the kernel via class_init_arguments (see src/kernel/service.py).
        self.db = db

    @kernel_function(name="query", description="Query the database.")
    def query(self, query: Annotated[str, "The SQL query"]) -> Annotated[List[pyodbc.Row], "The rows returned"]:
        """Run the SQL produced by the model and return the resulting rows.

        Database.query returns the string "No Result Found" on errors, which is
        passed through to the model unchanged.
        """
        logger.info("Running database plugin with query: {}".format(query))
        return self.db.query(query)
23 |
--------------------------------------------------------------------------------
/src/plugins/nlp_to_sql/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "schema": 1,
3 | "type": "completion",
4 | "description": "Given a database natural language query, convert it to SQL.",
5 | "completion": {
6 | "max_tokens": 200,
7 | "temperature": 0,
8 | "top_p": 0,
9 | "presence_penalty": 0.0,
10 | "frequency_penalty": 0.0
11 | },
12 | "input_variables": [
13 | {
14 | "name": "input",
15 | "description": "natural language query to convert to SQL",
16 | "required": true
17 | }
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
/src/plugins/nlp_to_sql/skprompt.txt:
--------------------------------------------------------------------------------
1 | ### SQL SERVER SQL tables, with their properties:
2 | #
3 | # ExplorationProduction (WellID, WellName, Location, ProductionDate, ProductionVolume, Operator, FieldName, Reservoir, Depth, APIGravity, WaterCut, GasOilRatio)
4 | #
5 | ### A SQL query to find {{$input}}. Please provide only one SQL query and nothing else in a single string. Do not output anything else apart from the SQL query.
6 | User: How many locations are there?
7 | Assistant: SELECT COUNT(DISTINCT Location) AS NumberOfLocations FROM ExplorationProduction;
8 | User: {{$input}}
9 | Assistant:
--------------------------------------------------------------------------------
/src/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azure-Samples/nlp-sql-in-a-box/b59b6b02fd5e468a51a90c3da4c7a24c483c4d6f/src/requirements.txt
--------------------------------------------------------------------------------
/src/speech/__init__.py:
--------------------------------------------------------------------------------
1 | from .service import Speech
2 |
--------------------------------------------------------------------------------
/src/speech/service.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from azure.identity import DefaultAzureCredential
4 | from azure.cognitiveservices.speech import SpeechConfig, SpeechRecognizer,SpeechSynthesizer, ResultReason, CancellationReason
5 | from azure.cognitiveservices.speech.audio import AudioConfig, AudioOutputConfig
6 |
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 | # see https://learn.microsoft.com/en-us/azure/ai-services/speech-service/how-to-configure-azure-ad-auth?tabs=portal&pivots=programming-language-python#get-a-microsoft-entra-access-token
11 | scope = 'https://cognitiveservices.azure.com/.default'
12 |
13 |
class Speech:
    """Microphone speech-to-text and speaker text-to-speech via Azure Speech.

    Authenticates with an Entra ID token (no API key), per the Azure AD
    authentication guide for the Speech SDK.
    """

    def __init__(self, credential: DefaultAzureCredential, resource_id: str, region: str) -> None:
        # 'aad#<resource-id>#<token>' is the documented authorization-token
        # format for Entra ID authentication against the Speech service.
        auth_token = 'aad#{}#{}'.format(resource_id, credential.get_token(scope).token)
        speech_config = SpeechConfig(auth_token=auth_token, region=region, speech_recognition_language="en-US")

        self._recognizer = SpeechRecognizer(speech_config=speech_config, audio_config=AudioConfig(use_default_microphone=True))
        self._synthesizer = SpeechSynthesizer(speech_config=speech_config, audio_config=AudioOutputConfig(use_default_speaker=True))

    def recognize(self) -> str:
        """
        Recognize speech from the microphone and convert it to text.

        Raises:
            Exception: if nothing was recognized or recognition was canceled.
        """
        response = self._recognizer.recognize_once()

        reason = response.reason
        if reason != ResultReason.RecognizedSpeech:
            error = 'Failed to recognize speech.'
            if reason == ResultReason.NoMatch:
                error = "No speech could be recognized: {}".format(response.no_match_details)
            elif reason == ResultReason.Canceled:
                cancellation_details = response.cancellation_details
                error = "Speech Recognition canceled: {}".format(cancellation_details.reason)
                if cancellation_details.reason == CancellationReason.Error:
                    # ". " separator: previously the two sentences ran together.
                    error += ". Error details: {}".format(cancellation_details.error_details)
            raise Exception("Speech recognition failed with error: {}".format(error))

        logger.info("Recognized text: {}".format(response.text))

        return response.text

    def synthesize(self, text: str) -> None:
        """
        Synthesize text to speech and play it through the default speaker.

        Raises:
            Exception: if synthesis did not complete successfully.
        """
        response = self._synthesizer.speak_text(text)

        if response.reason != ResultReason.SynthesizingAudioCompleted:
            cancellation_details = response.cancellation_details
            error = "Speech synthesis canceled: {}".format(cancellation_details.reason)
            if cancellation_details.reason == CancellationReason.Error:
                if cancellation_details.error_details:
                    # ". " separator: previously the two sentences ran together.
                    error += ". Error details: {}".format(cancellation_details.error_details)
            raise Exception("Speech synthesis failed with error: {}".format(error))

        logger.info("Speech synthesized for text [{}]".format(text))
59 |
--------------------------------------------------------------------------------