├── .dccache
├── .devcontainer
├── Dockerfile
└── devcontainer.json
├── .github
├── dependabot.yml
└── workflows
│ ├── module1-azure-function-workflow.yaml
│ └── module1-infra-workflow.yml
├── .gitignore
├── .vscode
└── settings.json
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── SECURITY.md
├── SUPPORT.md
├── docs
├── .gitignore
├── 404.html
├── Gemfile
├── Gemfile.lock
├── _config.yml
├── _posts
│ └── 2022-03-15-welcome-to-jekyll.markdown
├── assets
│ ├── images
│ │ ├── module0
│ │ │ ├── aks-devsecops-architecture.png
│ │ │ └── devsecops-lifycycle-phases.png
│ │ ├── module1
│ │ │ ├── codespaces-new-terminal.png
│ │ │ ├── curl-localhost.png
│ │ │ ├── module1-create-event-subscription.png
│ │ │ ├── module1-function-app-settings.png
│ │ │ ├── screenshot-localhost.png
│ │ │ └── transaction-classification-application-architecture.png
│ │ ├── module2
│ │ │ ├── appcomponents.png
│ │ │ ├── chatscreen.png
│ │ │ ├── cog_resources.png
│ │ │ └── endpoint.png
│ │ └── module3
│ │ │ ├── book-creator-sequence.png
│ │ │ ├── bookcreator1.png
│ │ │ ├── bookcreator2.png
│ │ │ ├── bookcreator3.png
│ │ │ ├── semantic-function.png
│ │ │ ├── sk-bcplay.png
│ │ │ ├── sk-bookcreator.png
│ │ │ ├── sk-bookcstart.png
│ │ │ ├── sk-pipeline.png
│ │ │ ├── sk-use-cases.png
│ │ │ └── sk-why.png
│ └── slides
│ │ └── Intro.pptx
├── index.markdown
└── modules
│ ├── Module0
│ ├── index.md
│ ├── intro.md
│ └── prereq.md
│ ├── Module1
│ ├── Lab-1.md
│ ├── index.md
│ └── intro.md
│ ├── Module2
│ ├── Lab-2.md
│ ├── index.md
│ └── intro.md
│ └── Module3
│ ├── bookcreatorflow.md
│ ├── index.md
│ ├── intro.md
│ └── lab-module3.md
└── tools
└── deploy
├── module0
├── aad-federated-cred.sh
└── credential.json
├── module1
├── TransactionClassifier
│ ├── .gitignore
│ ├── Properties
│ │ ├── ServiceDependencies
│ │ │ └── xact-classifier-openai - Zip Deploy
│ │ │ │ ├── appInsights1.arm.json
│ │ │ │ └── storage1.arm.json
│ │ ├── launchSettings.json
│ │ ├── serviceDependencies.json
│ │ ├── serviceDependencies.local.json
│ │ └── serviceDependencies.xact-classifier-openai - Zip Deploy.json
│ ├── TransactionClassifier.cs
│ ├── TransactionClassifier.csproj
│ └── host.json
├── data
│ └── 25000_spend_dataset_current_25.csv
└── infra
│ └── module1-infra.bicep
└── module3
├── pom.xml
└── src
├── main
├── java
│ └── com
│ │ └── github
│ │ └── demo
│ │ ├── DemoServer.java
│ │ ├── model
│ │ └── Book.java
│ │ ├── service
│ │ ├── BookDatabase.java
│ │ ├── BookDatabaseImpl.java
│ │ ├── BookService.java
│ │ ├── BookServiceException.java
│ │ └── BookUtils.java
│ │ └── servlet
│ │ ├── BookServlet.java
│ │ └── StatusServlet.java
├── resources
│ ├── log4j2.xml
│ └── version.properties
└── webapp
│ └── static
│ ├── books.html
│ ├── books_de.properties
│ ├── books_en.properties
│ ├── books_fr.properties
│ ├── books_nl.properties
│ ├── bootstrap-3.4.1-dist
│ ├── css
│ │ ├── bootstrap-theme.css
│ │ ├── bootstrap-theme.css.map
│ │ ├── bootstrap-theme.min.css
│ │ ├── bootstrap-theme.min.css.map
│ │ ├── bootstrap.css
│ │ ├── bootstrap.css.map
│ │ ├── bootstrap.min.css
│ │ └── bootstrap.min.css.map
│ ├── fonts
│ │ ├── glyphicons-halflings-regular.eot
│ │ ├── glyphicons-halflings-regular.svg
│ │ ├── glyphicons-halflings-regular.ttf
│ │ ├── glyphicons-halflings-regular.woff
│ │ └── glyphicons-halflings-regular.woff2
│ └── js
│ │ ├── bootstrap.js
│ │ ├── bootstrap.min.js
│ │ └── npm.js
│ └── images
│ ├── covers
│ ├── chasm.jpg
│ ├── lean.jpg
│ ├── month.jpg
│ ├── pragmatic.jpg
│ ├── scrum.jpg
│ └── think.jpg
│ ├── favicon.ico
│ ├── heart.png
│ ├── invertocat.svg
│ ├── octocat.png
│ └── star.png
└── test
└── java
└── com
└── github
└── demo
├── model
└── BookTest.java
└── service
└── BookServiceTest.java
/.dccache:
--------------------------------------------------------------------------------
1 | {"c:/Users/adnankhan/dev/AKS-DevSecOps-Workshop/docs/404.html":[444,1675714544701.9653,"79d3f18e6d7254b216adc7a98da02fbc0a8211a4fa79cae5c0d4adb866f6d662"],"c:\\Users\\adnankhan\\dev\\AKS-DevSecOps-Workshop\\docs\\404.html":[444,1675714544701.9653,"79d3f18e6d7254b216adc7a98da02fbc0a8211a4fa79cae5c0d4adb866f6d662"]}
--------------------------------------------------------------------------------
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG VARIANT=bullseye
2 | FROM --platform=amd64 mcr.microsoft.com/vscode/devcontainers/base:0-${VARIANT}
3 | RUN wget https://packages.microsoft.com/config/debian/11/packages-microsoft-prod.deb -O packages-microsoft-prod.deb \
4 | && dpkg -i packages-microsoft-prod.deb
5 | RUN export DEBIAN_FRONTEND=noninteractive \
6 |     && apt-get update && apt-get install -y xdg-utils dotnet-sdk-7.0 \
7 | && apt-get clean -y && rm -rf /var/lib/apt/lists/*
8 | RUN curl -fsSL https://aka.ms/install-azd.sh | bash
9 | # RUN apt-get install -y dotnet-sdk-6.0
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Azure Developer CLI",
3 | "build": {
4 | "dockerfile": "Dockerfile",
5 | "args": {
6 | "VARIANT": "bullseye"
7 | }
8 | },
9 | "features": {
10 | "ghcr.io/devcontainers/features/python:1": {
11 | "version": "os-provided"
12 | },
13 | "ghcr.io/devcontainers/features/node:1": {
14 | "version": "16",
15 | "nodeGypDependencies": false
16 | },
17 | "ghcr.io/devcontainers/features/powershell:1.1.0": {},
18 |
19 | "ghcr.io/devcontainers/features/azure-cli:1.0.8": {},
20 |
21 | "ghcr.io/devcontainers/features/dotnet:1": {
22 | "version": "6.0" },
23 |
24 |
25 | "ghcr.io/jlaundry/devcontainer-features/azure-functions-core-tools:1": {}
26 | },
27 | "customizations": {
28 | "vscode": {
29 | "extensions": [
30 | "ms-dotnettools.dotnet-interactive-vscode",
31 | "ms-semantic-kernel.semantic-kernel",
32 | "esbenp.prettier-vscode",
33 | "ms-azuretools.azure-dev",
34 | "ms-azuretools.vscode-bicep",
35 | "ms-python.python"
36 | ]
37 | }
38 | },
39 | "forwardPorts": [
40 | 5000
41 | ],
42 | "postCreateCommand": "",
43 | "remoteUser": "vscode",
44 | "hostRequirements": {
45 | "memory": "8gb"
46 | }
47 | }
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "maven" # See documentation for possible values
9 | directory: "/tools/deploy/module3" # Location of package manifests
10 | schedule:
11 | interval: "weekly"
12 |
--------------------------------------------------------------------------------
/.github/workflows/module1-azure-function-workflow.yaml:
--------------------------------------------------------------------------------
1 | name: Deploy Azure Function App
2 |
3 | on:
4 | workflow_dispatch:
5 | push:
6 | branches:
7 | - main
8 | paths:
9 | - 'tools/deploy/module1/**'
10 | - '.github/workflows/module1-azure-function-workflow.yaml'
11 |
12 | env:
13 | AZURE_FUNCTIONAPP_PACKAGE_PATH: 'tools/deploy/module1/TransactionClassifier/' # set this to the path to your function app project, defaults to the repository root
14 | DOTNET_VERSION: '6.0.x' # set this to the dotnet version to use (e.g. '2.1.x', '3.1.x', '5.0.x')
15 | AZURE_RESOURCE_GROUP: ${{ secrets.AZURE_RESOURCE_GROUP }}
16 |
17 | permissions:
18 | id-token: write # This is required for requesting the JWT
19 | contents: read # This is required for actions/checkout
20 |
21 | jobs:
22 | build-and-deploy:
23 | runs-on: ubuntu-latest
24 | steps:
25 | - name: 'Checkout GitHub Action'
26 | uses: actions/checkout@main
27 |
28 | - name: 'Login via Azure CLI'
29 | uses: azure/login@v1
30 | with:
31 | client-id: ${{ secrets.AZURE_CLIENT_ID }}
32 | tenant-id: ${{ secrets.AZURE_TENANT_ID }}
33 | subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
34 |
35 | - name: 'retrieve the Function App name'
36 | id: get_functionapp_name
37 | uses: azure/CLI@v1
38 | with:
39 | azcliversion: latest
40 | inlineScript: |
41 | echo "appName=$(az functionapp list --resource-group ${{ env.AZURE_RESOURCE_GROUP }} --query "[].name" -o tsv)" >> $GITHUB_OUTPUT
42 |
43 | - name: Setup DotNet ${{ env.DOTNET_VERSION }} Environment
44 | uses: actions/setup-dotnet@v3
45 | with:
46 | dotnet-version: ${{ env.DOTNET_VERSION }}
47 |
48 | - name: 'Resolve Project Dependencies Using Dotnet'
49 | shell: bash
50 | run: |
51 | pushd './${{ env.AZURE_FUNCTIONAPP_PACKAGE_PATH }}'
52 | dotnet build --configuration Release --output ./output
53 | popd
54 |
55 | - name: 'Run Azure Functions Action'
56 | uses: Azure/functions-action@v1
57 | id: fa
58 | with:
59 | # set app_name equal to output from "retrieve the Function App name" step
60 |         app-name: ${{ steps.get_functionapp_name.outputs.appName }}
61 | package: '${{ env.AZURE_FUNCTIONAPP_PACKAGE_PATH }}/output'
62 |
63 | # For more samples to get started with GitHub Action workflows to deploy to Azure, refer to https://github.com/Azure/actions-workflow-samples
--------------------------------------------------------------------------------
/.github/workflows/module1-infra-workflow.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Module 1 Infrastructure
2 | on:
3 | push:
4 | branches:
5 | - main
6 | paths:
7 | - 'tools/deploy/module1/**'
8 |       - '.github/workflows/module1-infra-workflow.yml'
9 |
10 | workflow_dispatch:
11 | env:
12 | AZURE_RESOURCE_GROUP: ${{ secrets.AZURE_RESOURCE_GROUP }}
13 |
14 | permissions:
15 | id-token: write # This is required for requesting the JWT
16 | contents: read # This is required for actions/checkout
17 | jobs:
18 | build-and-deploy:
19 | runs-on: ubuntu-latest
20 | steps:
21 |
22 | # Checkout code
23 | - uses: actions/checkout@main
24 |
25 | # Log into Azure
26 | - uses: azure/login@v1
27 | with:
28 | client-id: ${{ secrets.AZURE_CLIENT_ID }}
29 | tenant-id: ${{ secrets.AZURE_TENANT_ID }}
30 | subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
31 | # creds: ${{ secrets.AZURE_CREDENTIALS }}
32 |
33 | # Deploy Bicep file
34 | - name: deploy
35 | uses: azure/arm-deploy@v1
36 | with:
37 | subscriptionId: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
38 | resourceGroupName: ${{ secrets.AZURE_RESOURCE_GROUP }}
39 | template: tools/deploy/module1/infra/module1-infra.bicep
40 | # parameters: tools/deploy/module0/azuredeploy.parameters.json
41 | failOnStdErr: false
42 |
43 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Mono auto generated files
17 | mono_crash.*
18 |
19 | # Build results
20 | [Dd]ebug/
21 | [Dd]ebugPublic/
22 | [Rr]elease/
23 | [Rr]eleases/
24 | x64/
25 | x86/
26 | [Aa][Rr][Mm]/
27 | [Aa][Rr][Mm]64/
28 | bld/
29 | [Bb]in/
30 | [Oo]bj/
31 | [Ll]og/
32 | [Ll]ogs/
33 |
34 | # Visual Studio 2015/2017 cache/options directory
35 | .vs/
36 | # Uncomment if you have tasks that create the project's static files in wwwroot
37 | #wwwroot/
38 |
39 | # Visual Studio 2017 auto generated files
40 | Generated\ Files/
41 |
42 | # MSTest test Results
43 | [Tt]est[Rr]esult*/
44 | [Bb]uild[Ll]og.*
45 |
46 | # NUnit
47 | *.VisualState.xml
48 | TestResult.xml
49 | nunit-*.xml
50 |
51 | # Build Results of an ATL Project
52 | [Dd]ebugPS/
53 | [Rr]eleasePS/
54 | dlldata.c
55 |
56 | # Benchmark Results
57 | BenchmarkDotNet.Artifacts/
58 |
59 | # .NET Core
60 | project.lock.json
61 | project.fragment.lock.json
62 | artifacts/
63 |
64 | # StyleCop
65 | StyleCopReport.xml
66 |
67 | # Files built by Visual Studio
68 | *_i.c
69 | *_p.c
70 | *_h.h
71 | *.ilk
72 | *.meta
73 | *.obj
74 | *.iobj
75 | *.pch
76 | *.pdb
77 | *.ipdb
78 | *.pgc
79 | *.pgd
80 | *.rsp
81 | *.sbr
82 | *.tlb
83 | *.tli
84 | *.tlh
85 | *.tmp
86 | *.tmp_proj
87 | *_wpftmp.csproj
88 | *.log
89 | *.vspscc
90 | *.vssscc
91 | .builds
92 | *.pidb
93 | *.svclog
94 | *.scc
95 |
96 | # Chutzpah Test files
97 | _Chutzpah*
98 |
99 | # Visual C++ cache files
100 | ipch/
101 | *.aps
102 | *.ncb
103 | *.opendb
104 | *.opensdf
105 | *.sdf
106 | *.cachefile
107 | *.VC.db
108 | *.VC.VC.opendb
109 |
110 | # Visual Studio profiler
111 | *.psess
112 | *.vsp
113 | *.vspx
114 | *.sap
115 |
116 | # Visual Studio Trace Files
117 | *.e2e
118 |
119 | # TFS 2012 Local Workspace
120 | $tf/
121 |
122 | # Guidance Automation Toolkit
123 | *.gpState
124 |
125 | # ReSharper is a .NET coding add-in
126 | _ReSharper*/
127 | *.[Rr]e[Ss]harper
128 | *.DotSettings.user
129 |
130 | # TeamCity is a build add-in
131 | _TeamCity*
132 |
133 | # DotCover is a Code Coverage Tool
134 | *.dotCover
135 |
136 | # AxoCover is a Code Coverage Tool
137 | .axoCover/*
138 | !.axoCover/settings.json
139 |
140 | # Visual Studio code coverage results
141 | *.coverage
142 | *.coveragexml
143 |
144 | # NCrunch
145 | _NCrunch_*
146 | .*crunch*.local.xml
147 | nCrunchTemp_*
148 |
149 | # MightyMoose
150 | *.mm.*
151 | AutoTest.Net/
152 |
153 | # Web workbench (sass)
154 | .sass-cache/
155 |
156 | # Installshield output folder
157 | [Ee]xpress/
158 |
159 | # DocProject is a documentation generator add-in
160 | DocProject/buildhelp/
161 | DocProject/Help/*.HxT
162 | DocProject/Help/*.HxC
163 | DocProject/Help/*.hhc
164 | DocProject/Help/*.hhk
165 | DocProject/Help/*.hhp
166 | DocProject/Help/Html2
167 | DocProject/Help/html
168 |
169 | # Click-Once directory
170 | publish/
171 |
172 | # Publish Web Output
173 | *.[Pp]ublish.xml
174 | *.azurePubxml
175 | # Note: Comment the next line if you want to checkin your web deploy settings,
176 | # but database connection strings (with potential passwords) will be unencrypted
177 | *.pubxml
178 | *.publishproj
179 |
180 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
181 | # checkin your Azure Web App publish settings, but sensitive information contained
182 | # in these scripts will be unencrypted
183 | PublishScripts/
184 |
185 | # NuGet Packages
186 | *.nupkg
187 | # NuGet Symbol Packages
188 | *.snupkg
189 | # The packages folder can be ignored because of Package Restore
190 | **/[Pp]ackages/*
191 | # except build/, which is used as an MSBuild target.
192 | !**/[Pp]ackages/build/
193 | # Uncomment if necessary however generally it will be regenerated when needed
194 | #!**/[Pp]ackages/repositories.config
195 | # NuGet v3's project.json files produces more ignorable files
196 | *.nuget.props
197 | *.nuget.targets
198 |
199 | # Microsoft Azure Build Output
200 | csx/
201 | *.build.csdef
202 |
203 | # Microsoft Azure Emulator
204 | ecf/
205 | rcf/
206 |
207 | # Windows Store app package directories and files
208 | AppPackages/
209 | BundleArtifacts/
210 | Package.StoreAssociation.xml
211 | _pkginfo.txt
212 | *.appx
213 | *.appxbundle
214 | *.appxupload
215 |
216 | # Visual Studio cache files
217 | # files ending in .cache can be ignored
218 | *.[Cc]ache
219 | # but keep track of directories ending in .cache
220 | !?*.[Cc]ache/
221 |
222 | # Others
223 | ClientBin/
224 | ~$*
225 | *~
226 | *.dbmdl
227 | *.dbproj.schemaview
228 | *.jfm
229 | *.pfx
230 | *.publishsettings
231 | orleans.codegen.cs
232 |
233 | # Including strong name files can present a security risk
234 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
235 | #*.snk
236 |
237 | # Since there are multiple workflows, uncomment next line to ignore bower_components
238 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
239 | #bower_components/
240 |
241 | # RIA/Silverlight projects
242 | Generated_Code/
243 |
244 | # Backup & report files from converting an old project file
245 | # to a newer Visual Studio version. Backup files are not needed,
246 | # because we have git ;-)
247 | _UpgradeReport_Files/
248 | Backup*/
249 | UpgradeLog*.XML
250 | UpgradeLog*.htm
251 | ServiceFabricBackup/
252 | *.rptproj.bak
253 |
254 | # SQL Server files
255 | *.mdf
256 | *.ldf
257 | *.ndf
258 |
259 | # Business Intelligence projects
260 | *.rdl.data
261 | *.bim.layout
262 | *.bim_*.settings
263 | *.rptproj.rsuser
264 | *- [Bb]ackup.rdl
265 | *- [Bb]ackup ([0-9]).rdl
266 | *- [Bb]ackup ([0-9][0-9]).rdl
267 |
268 | # Microsoft Fakes
269 | FakesAssemblies/
270 |
271 | # GhostDoc plugin setting file
272 | *.GhostDoc.xml
273 |
274 | # Node.js Tools for Visual Studio
275 | .ntvs_analysis.dat
276 | node_modules/
277 |
278 | # Visual Studio 6 build log
279 | *.plg
280 |
281 | # Visual Studio 6 workspace options file
282 | *.opt
283 |
284 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
285 | *.vbw
286 |
287 | # Visual Studio LightSwitch build output
288 | **/*.HTMLClient/GeneratedArtifacts
289 | **/*.DesktopClient/GeneratedArtifacts
290 | **/*.DesktopClient/ModelManifest.xml
291 | **/*.Server/GeneratedArtifacts
292 | **/*.Server/ModelManifest.xml
293 | _Pvt_Extensions
294 |
295 | # Paket dependency manager
296 | .paket/paket.exe
297 | paket-files/
298 |
299 | # FAKE - F# Make
300 | .fake/
301 |
302 | # CodeRush personal settings
303 | .cr/personal
304 |
305 | # Python Tools for Visual Studio (PTVS)
306 | __pycache__/
307 | *.pyc
308 |
309 | # Cake - Uncomment if you are using it
310 | # tools/**
311 | # !tools/packages.config
312 |
313 | # Tabs Studio
314 | *.tss
315 |
316 | # Telerik's JustMock configuration file
317 | *.jmconfig
318 |
319 | # BizTalk build output
320 | *.btp.cs
321 | *.btm.cs
322 | *.odx.cs
323 | *.xsd.cs
324 |
325 | # OpenCover UI analysis results
326 | OpenCover/
327 |
328 | # Azure Stream Analytics local run output
329 | ASALocalRun/
330 |
331 | # MSBuild Binary and Structured Log
332 | *.binlog
333 |
334 | # NVidia Nsight GPU debugger configuration file
335 | *.nvuser
336 |
337 | # MFractors (Xamarin productivity tool) working folder
338 | .mfractor/
339 |
340 | # Local History for Visual Studio
341 | .localhistory/
342 |
343 | # BeatPulse healthcheck temp database
344 | healthchecksdb
345 |
346 | # Backup folder for Package Reference Convert tool in Visual Studio 2017
347 | MigrationBackup/
348 |
349 | # Ionide (cross platform F# VS Code tools) working folder
350 | .ionide/
351 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 |     "azureFunctions.projectSubpath": "tools/deploy/module1/TransactionClassifier"
3 | }
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Microsoft Open Source Code of Conduct
2 |
3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4 |
5 | Resources:
6 |
7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
10 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Azure OpenAI API Hands-on LevelUp Labs
2 |
3 | Welcome to the **Azure OpenAI API Hands-on LevelUp Labs** repository! This repository contains three hands-on labs that will help you build intelligent applications using Azure OpenAI API.
4 |
5 | ## Lab 1: Transaction Classification Application
6 |
7 | This lab demonstrates how to classify transaction records using OpenAI's GPT-3 language model and Azure Storage, Azure Event Grid, and Azure Function services. You will learn how to build an intelligent transaction classification app that can automatically categorize financial transactions.
8 |
9 | ## Lab 2: ChatGPT + Enterprise Data with OpenAI and Azure Cognitive Search
10 | This lab shows how to use Azure OpenAI Service to access the ChatGPT model (gpt-35-turbo) and Azure Cognitive Search for data indexing and retrieval. You will learn how to build a chatbot that can answer questions using enterprise data.
11 |
12 | ## Lab 3: Book Creator Sample Learning App
13 |
14 | This lab demonstrates how to use the [semantic kernel](https://github.com/microsoft/semantic-kernel) framework to integrate OpenAI's Large Language Models (LLMs) with conventional programming languages. You will learn how to build an intelligent book creator app that can generate new content based on existing text.
15 |
16 | ## Getting Started
17 |
18 | To get started with these labs, please follow the instructions [here](https://ahmedbham.github.io/azure-openai-api-levelup/)
19 |
20 | ## Trademarks
21 |
22 | This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
23 | trademarks or logos is subject to and must follow [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party's policies.
24 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Security
4 |
5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
6 |
7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
8 |
9 | ## Reporting Security Issues
10 |
11 | **Please do not report security vulnerabilities through public GitHub issues.**
12 |
13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
14 |
15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey).
16 |
17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc).
18 |
19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
20 |
21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
22 | * Full paths of source file(s) related to the manifestation of the issue
23 | * The location of the affected source code (tag/branch/commit or direct URL)
24 | * Any special configuration required to reproduce the issue
25 | * Step-by-step instructions to reproduce the issue
26 | * Proof-of-concept or exploit code (if possible)
27 | * Impact of the issue, including how an attacker might exploit the issue
28 |
29 | This information will help us triage your report more quickly.
30 |
31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs.
32 |
33 | ## Preferred Languages
34 |
35 | We prefer all communications to be in English.
36 |
37 | ## Policy
38 |
39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd).
40 |
41 |
42 |
--------------------------------------------------------------------------------
/SUPPORT.md:
--------------------------------------------------------------------------------
1 | # TODO: The maintainer of this repo has not yet edited this file
2 |
3 | **REPO OWNER**: Do you want Customer Service & Support (CSS) support for this product/project?
4 |
5 | - **No CSS support:** Fill out this template with information about how to file issues and get help.
6 | - **Yes CSS support:** Fill out an intake form at [aka.ms/onboardsupport](https://aka.ms/onboardsupport). CSS will work with/help you to determine next steps.
7 | - **Not sure?** Fill out an intake as though the answer were "Yes". CSS will help you decide.
8 |
9 | *Then remove this first heading from this SUPPORT.MD file before publishing your repo.*
10 |
11 | # Support
12 |
13 | ## How to file issues and get help
14 |
15 | This project uses GitHub Issues to track bugs and feature requests. Please search the existing
16 | issues before filing new issues to avoid duplicates. For new issues, file your bug or
17 | feature request as a new Issue.
18 |
19 | For help and questions about using this project, please **REPO MAINTAINER: INSERT INSTRUCTIONS HERE
20 | FOR HOW TO ENGAGE REPO OWNERS OR COMMUNITY FOR HELP. COULD BE A STACK OVERFLOW TAG OR OTHER
21 | CHANNEL. WHERE WILL YOU HELP PEOPLE?**.
22 |
23 | ## Microsoft Support Policy
24 |
25 | Support for this **PROJECT or PRODUCT** is limited to the resources listed above.
26 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | _site
2 | .sass-cache
3 | .jekyll-cache
4 | .jekyll-metadata
5 | vendor
6 |
--------------------------------------------------------------------------------
/docs/404.html:
--------------------------------------------------------------------------------
1 | ---
2 | permalink: /404.html
3 | layout: default
4 | ---
5 |
6 |
19 |
20 |
21 |
404
22 |
23 |
Page not found :(
24 |
The requested page could not be found.
25 |
26 |
--------------------------------------------------------------------------------
/docs/Gemfile:
--------------------------------------------------------------------------------
1 | source 'https://rubygems.org'
2 |
3 | gem "jekyll", "~> 4.3" # installed by `gem jekyll`
4 | # gem "webrick" # required when using Ruby >= 3 and Jekyll <= 4.2.2
5 |
6 | gem "just-the-docs", "0.4.0.rc4" # currently the latest pre-release
7 | # gem "just-the-docs" # the latest release - currently 0.3.3
8 |
--------------------------------------------------------------------------------
/docs/Gemfile.lock:
--------------------------------------------------------------------------------
1 | GEM
2 | remote: https://rubygems.org/
3 | specs:
4 | addressable (2.8.1)
5 | public_suffix (>= 2.0.2, < 6.0)
6 | colorator (1.1.0)
7 | concurrent-ruby (1.1.10)
8 | em-websocket (0.5.3)
9 | eventmachine (>= 0.12.9)
10 | http_parser.rb (~> 0)
11 | eventmachine (1.2.7)
12 | ffi (1.15.5)
13 | forwardable-extended (2.6.0)
14 | http_parser.rb (0.8.0)
15 | i18n (1.12.0)
16 | concurrent-ruby (~> 1.0)
17 | jekyll (4.3.0)
18 | addressable (~> 2.4)
19 | colorator (~> 1.0)
20 | em-websocket (~> 0.5)
21 | i18n (~> 1.0)
22 | jekyll-sass-converter (>= 2.0, < 4.0)
23 | jekyll-watch (~> 2.0)
24 | kramdown (~> 2.3, >= 2.3.1)
25 | kramdown-parser-gfm (~> 1.0)
26 | liquid (~> 4.0)
27 | mercenary (>= 0.3.6, < 0.5)
28 | pathutil (~> 0.9)
29 | rouge (>= 3.0, < 5.0)
30 | safe_yaml (~> 1.0)
31 | terminal-table (>= 1.8, < 4.0)
32 | webrick (~> 1.7)
33 | jekyll-sass-converter (2.2.0)
34 | sassc (> 2.0.1, < 3.0)
35 | jekyll-seo-tag (2.8.0)
36 | jekyll (>= 3.8, < 5.0)
37 | jekyll-watch (2.2.1)
38 | listen (~> 3.0)
39 | just-the-docs (0.4.0.rc4)
40 | jekyll (>= 3.8.5)
41 | jekyll-seo-tag (>= 2.0)
42 | rake (>= 12.3.1)
43 | kramdown (2.4.0)
44 | rexml
45 | kramdown-parser-gfm (1.1.0)
46 | kramdown (~> 2.0)
47 | liquid (4.0.3)
48 | listen (3.7.1)
49 | rb-fsevent (~> 0.10, >= 0.10.3)
50 | rb-inotify (~> 0.9, >= 0.9.10)
51 | mercenary (0.4.0)
52 | pathutil (0.16.2)
53 | forwardable-extended (~> 2.6)
54 | public_suffix (5.0.0)
55 | rake (13.0.6)
56 | rb-fsevent (0.11.2)
57 | rb-inotify (0.10.1)
58 | ffi (~> 1.0)
59 | rexml (3.2.5)
60 | rouge (4.0.0)
61 | safe_yaml (1.0.5)
62 | sassc (2.4.0)
63 | ffi (~> 1.9)
64 | terminal-table (3.0.2)
65 | unicode-display_width (>= 1.1.1, < 3)
66 | unicode-display_width (2.3.0)
67 | webrick (1.7.0)
68 |
69 | PLATFORMS
70 | arm64-darwin-21
71 | x86_64-darwin-19
72 | x86_64-linux
73 |
74 | DEPENDENCIES
75 | jekyll (~> 4.3)
76 | just-the-docs (= 0.4.0.rc4)
77 |
78 | BUNDLED WITH
79 | 2.3.9
80 |
81 |
--------------------------------------------------------------------------------
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | # Welcome to Jekyll!
2 | #
3 | # This config file is meant for settings that affect your whole blog, values
4 | # which you are expected to set up once and rarely edit after that. If you find
5 | # yourself editing this file very often, consider using Jekyll's data files
6 | # feature for the data you need to update frequently.
7 | #
8 | # For technical reasons, this file is *NOT* reloaded automatically when you use
9 | # 'bundle exec jekyll serve'. If you change this file, please restart the server process.
10 | #
11 | # If you need help with YAML syntax, here are some quick references for you:
12 | # https://learn-the-web.algonquindesign.ca/topics/markdown-yaml-cheat-sheet/#yaml
13 | # https://learnxinyminutes.com/docs/yaml/
14 | #
15 | # Site settings
16 | # These are used to personalize your new site. If you look in the HTML files,
17 | # you will see them accessed via {{ site.title }}, {{ site.email }}, and so on.
18 | # You can create any custom variable you would like, and they will be accessible
19 | # in the templates via {{ site.myvariable }}.
20 |
21 | title: Azure OpenAI API Hands-on LevelUp Labs
22 | #email: your-email@example.com
23 | description: >- # this means to ignore newlines until "baseurl:"
24 |   Hands-on labs for the Azure OpenAI API — LevelUp Labs!
25 | #baseurl: "" # the subpath of your site, e.g. /blog
26 | #url: "" # the base hostname & protocol for your site, e.g. http://example.com
27 | #twitter_username: jekyllrb
28 | #github_username: jekyll
29 |
30 |
31 | search_enabled: true
32 | color_scheme: "light"
33 | remote_theme: pmarsceill/just-the-docs
34 | search:
35 | # Split pages into sections that can be searched individually
36 | # Supports 1 - 6, default: 2
37 | heading_level: 2
38 | # Maximum amount of previews per search result
39 | # Default: 3
40 | previews: 3
41 | # Maximum amount of words to display before a matched word in the preview
42 | # Default: 5
43 | preview_words_before: 5
44 | # Maximum amount of words to display after a matched word in the preview
45 | # Default: 10
46 | preview_words_after: 10
47 | # Set the search token separator
48 | # Default: /[\s\-/]+/
49 | # Example: enable support for hyphenated search words
50 | tokenizer_separator: /[\s/]+/
51 | # Display the relative url in search results
52 | # Supports true (default) or false
53 | rel_url: true
54 | # Enable or disable the search button that appears in the bottom right corner of every page
55 | # Supports true or false (default)
56 | button: false
57 | # Exclude from processing.
58 | # The following items will not be processed, by default.
59 | # Any item listed under the `exclude:` key here will be automatically added to
60 | # the internal "default list".
61 | #
62 | # Excluded items can be processed by explicitly listing the directories or
63 | # their entries' file path in the `include:` list.
64 | #
65 | # exclude:
66 | # - .sass-cache/
67 | # - .jekyll-cache/
68 | # - gemfiles/
69 | # - Gemfile
70 | # - Gemfile.lock
71 | # - node_modules/
72 | # - vendor/bundle/
73 | # - vendor/cache/
74 | # - vendor/gems/
75 | # - vendor/ruby/
76 | # Footer content
77 | # appears at the bottom of every page's main content
78 | # Note: The footer_content option is deprecated and will be removed in a future major release. Please use `_includes/footer_custom.html` for more robust
79 | footer_content: "Distributed under the MIT license."
80 |
81 | # Footer last edited timestamp
82 | last_edit_timestamp: true # show or hide edit time - page must have `last_modified_date` defined in the frontmatter
83 | last_edit_time_format: "%b %e %Y at %I:%M %p" # uses ruby's time format: https://ruby-doc.org/stdlib-2.7.0/libdoc/time/rdoc/Time.html
84 |
85 | # Footer "Edit this page on GitHub" link text
86 | gh_edit_link: true # show or hide edit this page link
87 | gh_edit_link_text: "Edit this page on GitHub."
88 | gh_edit_repository: "https://github.com/ahmedbham/azure-openai-api-levelup" # the github URL for your repo
89 | gh_edit_branch: "main" # the branch that your docs is served from
90 | # gh_edit_source: docs # the source that your files originate from
91 | gh_edit_view_mode: "tree" # "tree" or "edit" if you want the user to jump into the editor immediately
92 |
93 | exclude: ["node_modules/", "*.gemspec", "*.gem", "Gemfile", "Gemfile.lock", "package.json", "package-lock.json", "script/", "LICENSE.txt", "lib/", "bin/", "README.md", "Rakefile", "docs/tests/"]
94 |
95 | # Back to top link
96 | back_to_top: true
97 | back_to_top_text: "Back to top"
98 |
99 | ga_tracking: UA-177508304-1
100 | ga_tracking_anonymize_ip: true # Use GDPR compliant Google Analytics settings (true/nil by default)
101 | # Aux links for the upper right navigation
102 | aux_links:
103 | "Azure OpenAI API Hands-on LevelUp Labs on GitHub":
104 | - "//github.com/ahmedbham/azure-openai-api-levelup"
105 |
106 | # Makes Aux links open in a new tab. Default is false
107 | aux_links_new_tab: true
108 |
--------------------------------------------------------------------------------
/docs/_posts/2022-03-15-welcome-to-jekyll.markdown:
--------------------------------------------------------------------------------
1 | ---
2 | layout: post
3 | title: "Welcome to Jekyll!"
4 | date: 2022-03-15 11:28:40 -0400
5 | categories: jekyll update
6 | ---
7 | You’ll find this post in your `_posts` directory. Go ahead and edit it and re-build the site to see your changes. You can rebuild the site in many different ways, but the most common way is to run `jekyll serve`, which launches a web server and auto-regenerates your site when a file is updated.
8 |
9 | Jekyll requires blog post files to be named according to the following format:
10 |
11 | `YEAR-MONTH-DAY-title.MARKUP`
12 |
13 | Where `YEAR` is a four-digit number, `MONTH` and `DAY` are both two-digit numbers, and `MARKUP` is the file extension representing the format used in the file. After that, include the necessary front matter. Take a look at the source for this post to get an idea about how it works.
14 |
15 | Jekyll also offers powerful support for code snippets:
16 |
17 | {% highlight ruby %}
18 | def print_hi(name)
19 | puts "Hi, #{name}"
20 | end
21 | print_hi('Tom')
22 | #=> prints 'Hi, Tom' to STDOUT.
23 | {% endhighlight %}
24 |
25 | Check out the [Jekyll docs][jekyll-docs] for more info on how to get the most out of Jekyll. File all bugs/feature requests at [Jekyll’s GitHub repo][jekyll-gh]. If you have questions, you can ask them on [Jekyll Talk][jekyll-talk].
26 |
27 | [jekyll-docs]: https://jekyllrb.com/docs/home
28 | [jekyll-gh]: https://github.com/jekyll/jekyll
29 | [jekyll-talk]: https://talk.jekyllrb.com/
30 |
--------------------------------------------------------------------------------
/docs/assets/images/module0/aks-devsecops-architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module0/aks-devsecops-architecture.png
--------------------------------------------------------------------------------
/docs/assets/images/module0/devsecops-lifycycle-phases.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module0/devsecops-lifycycle-phases.png
--------------------------------------------------------------------------------
/docs/assets/images/module1/codespaces-new-terminal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module1/codespaces-new-terminal.png
--------------------------------------------------------------------------------
/docs/assets/images/module1/curl-localhost.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module1/curl-localhost.png
--------------------------------------------------------------------------------
/docs/assets/images/module1/module1-create-event-subscription.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module1/module1-create-event-subscription.png
--------------------------------------------------------------------------------
/docs/assets/images/module1/module1-function-app-settings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module1/module1-function-app-settings.png
--------------------------------------------------------------------------------
/docs/assets/images/module1/screenshot-localhost.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module1/screenshot-localhost.png
--------------------------------------------------------------------------------
/docs/assets/images/module1/transaction-classification-application-architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module1/transaction-classification-application-architecture.png
--------------------------------------------------------------------------------
/docs/assets/images/module2/appcomponents.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module2/appcomponents.png
--------------------------------------------------------------------------------
/docs/assets/images/module2/chatscreen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module2/chatscreen.png
--------------------------------------------------------------------------------
/docs/assets/images/module2/cog_resources.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module2/cog_resources.png
--------------------------------------------------------------------------------
/docs/assets/images/module2/endpoint.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module2/endpoint.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/book-creator-sequence.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/book-creator-sequence.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/bookcreator1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/bookcreator1.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/bookcreator2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/bookcreator2.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/bookcreator3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/bookcreator3.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/semantic-function.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/semantic-function.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/sk-bcplay.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/sk-bcplay.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/sk-bookcreator.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/sk-bookcreator.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/sk-bookcstart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/sk-bookcstart.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/sk-pipeline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/sk-pipeline.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/sk-use-cases.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/sk-use-cases.png
--------------------------------------------------------------------------------
/docs/assets/images/module3/sk-why.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/images/module3/sk-why.png
--------------------------------------------------------------------------------
/docs/assets/slides/Intro.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/docs/assets/slides/Intro.pptx
--------------------------------------------------------------------------------
/docs/index.markdown:
--------------------------------------------------------------------------------
1 | # Azure OpenAI API Hands-on LevelUp Labs
2 |
3 | Welcome to the **Azure OpenAI API Hands-on LevelUp Labs** repository! This repository contains three hands-on labs that will help you build intelligent applications using Azure OpenAI API.
4 |
5 | ## Lab 1: Transaction Classification Application
6 |
7 | This lab demonstrates how to classify transaction records using OpenAI's GPT-3 language model and Azure Storage, Azure Event Grid, and Azure Function services. You will learn how to build an intelligent transaction classification app that can automatically categorize financial transactions.
8 |
9 | ## Lab 2: ChatGPT + Enterprise Data with OpenAI and Azure Cognitive Search
10 | This lab shows how to use Azure OpenAI Service to access the ChatGPT model (gpt-35-turbo) and Azure Cognitive Search for data indexing and retrieval. You will learn how to build a chatbot that can answer questions using enterprise data.
11 |
12 | ## Lab 3: Book Creator Sample Learning App
13 |
14 | This lab demonstrates how to use the [semantic kernel](https://github.com/microsoft/semantic-kernel) framework to integrate OpenAI's Large Language Models (LLMs) with conventional programming languages. You will learn how to build an intelligent book creator app that can generate new content based on existing text.
15 |
16 | ## Getting Started
17 |
18 | To get started with these labs, you will need to have an Azure OpenAI API key and access to Azure services. Please follow the instructions in the [pre-req](./modules/Module0/prereq.md) file to set up the necessary environment and dependencies
19 |
20 |
21 | ## Contributors
22 |
23 |
24 | {% for contributor in site.github.contributors %}
25 |
26 |
27 |
28 | {% endfor %}
29 |
30 |
31 | ## Trademarks
32 |
33 | This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
34 | trademarks or logos is subject to and must follow [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party's policies.
35 |
--------------------------------------------------------------------------------
/docs/modules/Module0/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Module 0 - Introduction and Pre-requisites
3 |
4 | has_children: true
5 |
6 | ---
7 |
8 |
9 | ## Module 0 - Introduction and Pre-requisites
10 |
--------------------------------------------------------------------------------
/docs/modules/Module0/intro.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | parent: Module 0 - Introduction and Pre-requisites
4 | has_children: false
5 | nav_order: 1
6 | ---
7 |
8 | **What is Azure OpenAI Service?**
9 |
10 | Azure OpenAI Service is a cloud-based service that provides access to
11 | the OpenAI API. You can use the OpenAI API to perform the following
12 | tasks:
13 |
14 | - Language Understanding
15 |
16 | - Text Summarization
17 |
18 | - Semantic Search
19 |
20 | - Conversation AI
21 |
22 | - Code Generation
23 |
24 | OpenAI is a powerful Language Generative model that predicts the next
25 | token to generate text output based on the input instruction from the
26 | user. Azure OpenAI is the model pretrained and hosted in Azure for
27 | easier deployment for the customer projects.
28 |
29 | To learn more about Azure OpenAI Service, you can:
30 |
31 | - Check out the [Azure OpenAI Service
32 | documentation](https://docs.microsoft.com/en-us/azure/openai/).
33 |
34 | ## Concepts:
35 | The user 'Prompt' gives text instructions with the appropriate context.
36 | The more detailed it is — ideally with examples — the better it helps
37 | the model arrive at the right context and generate the result set
38 | 'Completion' that is presented to the user.
39 |
40 | You can train the model with one or few-shot examples or with
41 | interactions. The model can be fine-tuned with a few parameters to
42 | customize it to the specific need. The model can be tuned to be
43 | deterministic/probabilistic or instructed to continue with the results
44 | based on these set parameter values.
45 |
46 | ## Azure OpenAI Service Models:
47 |
48 | - GPT-3 is the first offering with the 4 models Ada, Babbage, Curie
49 | and Davinci with the increasing inferencing capabilities, but would
50 | consume more time for presenting the results. The GPT Codex models
51 | supports Co-pilot.
52 |
53 | - GPT-35-Turbo is the ChatGPT model option with improved accuracy for
54 | a conversational model.
55 |
56 | - GPT-4 is the preview version that allows for a larger token size
57 | prompts and has security built-in. You can request using this
58 | [Access Request
59 | Form](https://customervoice.microsoft.com/Pages/ResponsePage.aspx?id=v4j5cvGGr0GRqy180BHbR7en2Ais5pxKtso_Pz4b1_xURjE4QlhVUERGQ1NXOTlNT0w1NldTWjJCMSQlQCN0PWcu)
60 |
61 | ## Applications and Use cases:
62 |
63 | The language generation from GPT is based on the semantics of the
64 | Prompt, which helps it infer the Completion in the scenarios below,
65 | illustrated with some examples:
66 |
67 | - Writing Assistance:
68 |
69 | - Government agency using Azure OpenAI Service to extract and
70 | summarize key information from their extensive library of rural
71 | development reports.
72 |
73 | - Financial services using Azure OpenAI Service to summarize
74 | financial reporting for peer risk analysis and customer
75 | conversation summarization.
76 |
77 | - Code Generation:
78 |
79 | - Aircraft company using to convert natural language to SQL for
80 | aircraft telemetry data.
81 |
82 | - Consulting service using Azure OpenAI Service to convert natural
83 | language to query propriety data models.
84 |
85 | - Reasoning over data
86 |
87 | - Financial services firm using Azure OpenAI Service to improve
88 | search capabilities and the conversational quality of a
89 | customer's Bot experience.
90 |
91 | - Insurance companies extract information from volumes of
92 | unstructured data to automate claim handling processes.
93 |
94 | - Summarization:
95 |
96 | - International insurance company using Azure OpenAI Service to
97 | provide summaries of call center customer support
98 | conversation-logs.
99 |
100 | - Global bank uses Azure OpenAI Service to summarize financial
101 |    reporting and analyst articles.
102 |
103 | ## Prompt Engineering:
104 |
105 | The model is only as effective as the Prompts sent as input. And this
106 | also trains the models to arrive to a customized model with appropriate
107 | inference context. Here are a few techniques that can support a better
108 | model performance:
109 |
110 | 1. Structure the input to instruct the model in a step-by-step process
111 |    to make it understand the question and guide it to arrive at the
112 | inference.
113 |
114 | 2. Prompt Chaining helps to elicit more reliable answers; the model
115 |    can then be fine-tuned with thousands of Prompts.
116 |
117 | 3. The models are limited by the Prompt token size for the deployment
118 | type chosen. Long text beyond the token limit is broken into Chunks
119 | and processed.
120 |
121 | 4. Leverage One-Shot/Few-Shot reasoning to be specific about what is
122 | the expected result set. The model can learn using these scenarios
123 |    presented in the Prompt, and you are explicitly telling the model how
124 | to think by prompting how it should reason for the similar problem.
125 |
126 | 5. This technique called Chain-of-Thought, is a super powerful
127 | technique, not only can it be used to provide model explainability
128 | (where sometimes GPT-3 is seen as a blackbox) but it can help the
129 | model reason and arrive at a desired output by simply just telling
130 | the model to think step by step.
131 |
132 | 6. One interesting trick is to have the model decompose the task into
133 | smaller tasks and figure it out on its own. This allows the model to
134 | reason along the way and can lead to much better results. The
135 | technique is called selection-inference prompting.
136 |
137 | ## Responsible AI (RAI):
138 |
139 | AI models designed for a specific purpose need to be perceived to
140 | be safe, trustworthy, and ethical. Responsible AI can help proactively
141 | guide these decisions toward more beneficial and equitable outcomes.
142 |
143 | - Ensure the model is compliant to the principles of RAI at different
144 | layers of the model deployed with appropriate checks and assessments
145 | at Fine Tuning , at Prompts to generated results , monitoring the
146 | response and the product performance against the expected promises.
147 |
148 | - Content Filtering, Feedback channel, Transparency in the product are
149 |    a few ways to ensure the application is Fair, Reliable, Transparent
150 | and Secure.
151 |
152 | ## How do I get started with building applications using Azure OpenAI Service?
153 |
154 | The best way to get started with building applications using Azure
155 | OpenAI Service is to follow the tutorials in this repository.
156 |
--------------------------------------------------------------------------------
/docs/modules/Module0/prereq.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Pre-requisites
3 | parent: Module 0 - Introduction and Pre-requisites
4 | has_children: false
5 | nav_order: 2
6 | ---
7 |
8 | ## What are the pre-requisites for this workshop?
9 |
10 | * Azure Subscription (if you don't have one, you can create a free account [here](https://azure.microsoft.com/en-us/free/))
11 | * Azure CLI (if you don't have one, you can install it [here](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli))
12 | * Github Account (if you don't have one, you can create one [here](https://github.com))
13 | * Github Codespaces - Github Codespaces is a feature of Github that allows you to develop in a cloud-hosted, container-based development environment. To learn more about Github Codespaces, you can check out the [documentation](https://docs.github.com/en/codespaces).
14 | * Azure OpenAI API resource and deployment of `text-davinci-003` model (see instructions for setting these up [here](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal))
15 |
16 | ## How do I get started with the workshop?
17 |
18 | To get started with the workshop, we will perform the following tasks:
19 |
20 | 1. Forking this repository into your GitHub account
21 | 1. Creating Github Codespace on the forked repository
22 |
23 | ### Forking this repository into your GitHub account
24 |
25 | * Fork this [repository](https://github.com/ahmedbham/azure-openai-api-levelup) into your GitHub account by clicking on the "Fork" button at the top right of its page.
26 |
27 | ### Creating Github Codespaces on the forked repository
28 |
29 | * Open your forked Github repository in Github and click on the `Code` tab.
30 | * Click on the `Open with Codespaces` button
31 |
--------------------------------------------------------------------------------
/docs/modules/Module1/Lab-1.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Lab
3 | parent: Module 1 - Transaction Classification Application
4 | has_children: false
5 | nav_order: 2
6 | ---
7 |
8 | # Module 1: Lab - Transaction Classification Application
9 |
10 | In this lab, you will create a transaction classification application that uses the Azure OpenAI API and other Azure services to classify a public dataset of transactions into a number of categories that we have predefined. These approaches should be replicable to any multiclass classification use case where we are trying to fit transactional data into predefined categories, and by the end of running through this you should have an approach for dealing with unlabelled datasets.
11 |
12 | For this workshop, we will be using GitHub Actions using OpenID Connect and Infrastructure-as-Code (IaC) using Bicep, to derive following benefits:
13 |
14 | * Infrastructure-as-Code (IaC) - Infrastructure is defined as code, and can be version controlled and reviewed.
15 | * OpenID Connect - OpenID Connect is an authentication protocol that allows you to connect securely to Azure resources using your GitHub account.
16 | * GitHub Actions - GitHub Actions is a feature of GitHub that allows you to automate your software development workflows.
17 |
18 | ## Steps for Deploying the infrastructure
19 |
20 | 1. Creating an Azure Resource Group
21 | 1. Creating Azure OpenAI API resource and deployment of `text-davinci-003` model
22 | 1. Configuring OpenID Connect in Azure.
23 | 1. Setting Github Actions secrets
24 | 1. Enabling the GitHub Actions workflow
25 | 1. Creation of Azure Storage Account and Azure Function App
26 | 1. Deploying the Azure Function App code
27 | 1. Creating and configuring Event Grid Subscription
28 |
29 | ### Creating an Azure Resource Group
30 |
31 | ```bash
32 | az login
33 | ```
34 |
35 | > [!NOTE]
36 | > if you are using a non-Microsoft account (e.g. FDPO account), and if you are running Codespaces in the browser, you may receive an error with message `localhost refused to connect` after logging in.
37 | > 
38 | > If so:
39 | >
40 | > 1. Copy the URL.
41 | > 1. Run `curl ''` (URL in quotes) in a new Visual Studio Code terminal.
42 | >
43 | > 
44 | > 1. In the original terminal, the login should now succeed.
45 |
46 | Set appropriate Subscription Id
47 |
48 | ```bash
49 | az account set -s
50 | ```
51 |
52 | Ensure correct Subscription Id is set
53 |
54 | ```bash
55 | az account show
56 | ```
57 |
58 | Register the following providers
59 |
60 | ```bash
61 | az provider register --namespace Microsoft.Storage --wait
62 | az provider register --namespace Microsoft.Web --wait
63 | az provider register --namespace Microsoft.EventGrid --wait
64 | ```
65 |
66 | Create Azure Resource Group
67 |
68 | ```bash
69 | export resourceGroupName="openai-levelup-rg"
70 | export location="eastus"
71 | az group create --name $resourceGroupName --location $location
72 | ```
73 |
74 | ### Creating Azure OpenAI API resource and deployment of `text-davinci-003` model
75 |
76 | If you have not already created an Azure OpenAI API resource and deployed the `text-davinci-003` model, you can follow the instructions [here](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal) to do so.
77 |
78 | ### Configuring OpenID Connect in Azure
79 |
80 | Execute `chmod` command on [aad-federated-cred.sh](../../../tools/deploy/module0/aad-federated-cred.sh) script to make it executable:
81 |
82 | ```bash
83 | chmod +x ./tools/deploy/module0/aad-federated-cred.sh
84 | ```
85 |
86 | * execute [aad-federated-cred.sh](../../../tools/deploy/module0/aad-federated-cred.sh), passing your github username as the argument, as shown below:
87 |
88 | ```bash
89 | ./tools/deploy/module0/aad-federated-cred.sh
90 | ```
91 |
92 | * note down the **appId** echoed for use in next step
93 |
94 | ### Setting Github Actions secrets
95 |
96 | 1. In your forked Github repository, click on the `Settings` tab.
97 | 2. In the left-hand menu, expand `Secrets and variables`, and click on `Actions`.
98 | 3. Click on the `New repository secret` button for each of the following secrets:
99 | * `AZURE_SUBSCRIPTION_ID`(run `az account show --query id -o tsv` to get the value)
100 | * `AZURE_TENANT_ID` (run `az account show --query tenantId --output tsv` to get the value)
101 | * `AZURE_CLIENT_ID` (this is the `appId` from running the previous step)
102 | * `AZURE_RESOURCE_GROUP` (this is the `resourceGroupName` from earlier step, which is `openai-levelup-rg`)
103 |
104 | ### Enable GitHub Actions workflow
105 |
106 | * Enable **GitHub Actions** for your repository by clicking on the **Actions** tab, and clicking on the `I understand my workflows, go ahead and enable them` button.
107 |
108 | ### Creation of Azure Storage Account and Azure Function App
109 |
110 | * This is achieved by running the Github Actions workflow file [module1-infra-workflow.yml](../../../.github/workflows/module1-infra-workflow.yml) which executes the [module1-infra.bicep](../../../tools/deploy/Module1/infra/module1-infra.bicep) Bicep template. To trigger this workflow manually:
111 | 1. click on the `Actions` tab.
112 | 2. Select `Deploy Module 1 Infrastructure` workflow.
113 | 3. Click on the `Run workflow` button
114 |
115 | ### Deploying the Azure Function App code
116 |
117 | * Deploy the Azure Function App code using Github Actions workflow file [module1-azure-function-workflow.yaml](../../../.github/workflows/module1-azure-function-workflow.yaml) as follows:
118 | 1. Click on the `Actions` tab.
119 | 2. Select `Deploy Azure Function App` workflow.
120 | 3. Click on the `Run workflow` button
121 |
122 | * Configure following **Application Settings** for the Azure Function by going to your `function app > Configuration > Application Settings`:
123 | 1. OPENAI_API_BASE - Azure OpenAI API Endpoint URL (e.g. https://openai-demo-ahmedbham.openai.azure.com/)
124 | 2. OPENAI_API_KEY - Azure OpenAI API Key
125 | 3. OPENAI_API_MODEL - "text-davinci-003" (set it equal to the `model name` you provided when deploying the `text-davinci-003` **model** in Azure OpenAI Studio)
126 | **Remember to click Save after adding the above settings**
127 |
128 | ### Creating and configuring Event Grid Subscription
129 |
130 | * Create an `Event Grid Subscription` to the Azure Function App Resource from the Azure Storage Account for "Blob Created" events in Azure portal:
131 | 1. Navigate to your `storage account > Events > + Event Subscription`
132 | 2. Set `Event Schema` to `Event Grid Schema`
133 | 3. Set `System Topic Name` to `classification`
134 | 4. Select only `Blob Created` event type
135 | 5. Select `Function App` as the `Endpoint Type`
136 | 6. Set `Endpoint` value by selecting the correct `Subscription`, `Resource Group`, `Function App`, `Slot`, and `Function` from the dropdowns
137 | 7. Click `Create` to create the subscription
138 |
139 | 
140 |
141 | ## Testing Transaction Classification App
142 |
143 | * Open the sample transaction file [25000_spend_dataset_current_25.csv](../../../tools/deploy/module1/data/25000_spend_dataset_current_25.csv) and notice that the **classification** column is empty. This is the column that will be populated by the Azure Function by calling Azure OpenAI API.
144 | * Upload this file to the **classification** blob container: `portal > storage account > containers > classification > upload`
145 | * After few seconds, download the updated file from the **output** blob container `portal > storage account > containers > output > download`
146 | * Open the file and notice the **classification** column is populated with the predicted category for each transaction.
147 |
148 | ## Delete Resources
149 |
150 | * Delete all resources created in this lab by deleting the resource group that was created in the first step of this lab.
151 |
152 | ```bash
153 | az group delete --name openai-levelup-rg --yes
154 | ```
155 |
--------------------------------------------------------------------------------
/docs/modules/Module1/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Module 1 - Transaction Classification Application
3 |
4 | has_children: true
5 |
6 | ---
7 |
8 | ## Module 1 - Transaction Classification Application
9 |
10 |
--------------------------------------------------------------------------------
/docs/modules/Module1/intro.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Overview
3 | parent: Module 1 - Transaction Classification Application
4 | has_children: false
5 | nav_order: 1
6 | ---
7 |
8 | ## Module 1 - Transaction Classification Application
9 |
10 | This is a tutorial project that demonstrates how to integrate Azure OpenAI API with Azure services to build intelligent applications. Specifically, it shows how to classify transaction records using OpenAI's GPT-3 language model and Azure Storage, Azure Event Grid and Azure Function services.
11 |
12 | The intent is to classify a public dataset of transactions into a number of categories that we have predefined. These approaches should be replicable to any multiclass classification use case where we are trying to fit transactional data into predefined categories, and by the end of running through this you should have an approach for dealing with unlabelled datasets.
13 |
14 | ## Diagram
15 |
16 | 
17 |
18 | ## Azure Function Logic
19 |
20 | The Azure Function logic is implemented in [TransactionClassifier.cs](../../../tools/deploy/module1/TransactionClassifier/TransactionClassifier.cs) file. The logic is as follows:
21 |
22 | ### Prompt Engineering
23 |
24 | A prompt is a string of text that is used to guide the language model to generate a response. In this case, we are using the following prompt to guide the language model to classify the transaction records:
25 |
26 | ```csharp
27 | "You are a data expert working for the National Library of Scotland.
28 | You are analysing all transactions over £25,000 in value and classifying them into one of five categories.
29 | The five categories are Building Improvement, Literature & Archive, Utility Bills, Professional Services and Software/IT.
30 | If you can't tell what it is, say Could not classify
31 |
32 | Transaction:
33 |
34 | Supplier: SUPPLIER_NAME
35 | Description: DESCRIPTION_TEXT
36 | Value: TRANSACTION_VALUE
37 |
38 | The classification is:"
39 | ```
40 |
41 | When the function is triggered by a csv file being uploaded to blob storage, it replaces the `SUPPLIER_NAME`, `DESCRIPTION_TEXT` and `TRANSACTION_VALUE` placeholders with the actual values from the transaction records in that file.
42 |
43 | ### Classifying Transactions
44 |
45 | The function will then call the Azure OpenAI API to classify the transaction record. The function will then parse the response from the API and extract the classification. Finally, the function will update the transaction record in an output csv file, and upload it to a blob container.
46 |
47 |
--------------------------------------------------------------------------------
/docs/modules/Module2/Lab-2.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Lab
3 | parent: Module 2 - Azure OpenAI and Cognitive Search
4 | has_children: false
5 | nav_order: 2
6 | ---
7 |
8 | ## Installation
9 |
10 | ### Preferred CLI Environment for this lab
11 |
12 | It's easiest to run the lab instructions using [Azure Cloud Shell](https://shell.azure.com).
13 |
14 | ### Project Initialization
15 |
16 | 1. Create a new folder `lab2` and switch to it in the terminal
17 |
18 | ```bash
19 | mkdir lab2
20 | cd lab2
21 | ```
22 |
23 | 1. Run the following command:
24 |
25 | ```bash
26 | your_tenant_id=$(az account show --query tenantId -o tsv)
27 | azd auth login --use-device-code=false --tenant-id $your_tenant_id
28 | ```
29 |
30 | > **NOTE**
31 | >
32 | > You may receive an error with message `localhost refused to connect` after logging in. If so:
33 | >
34 | > 1. Copy the URL.
35 | > 1. Open a new Cloud Shell or CodeSpaces terminal
36 | > 1. Run `curl ''` (URL in quotes) in the new terminal.
37 | >
38 | > Return to the original CodeSpaces or Cloud Shell terminal; the login should now succeed.
39 |
40 | 1. Run the following command:
41 |
42 | ```bash
43 | yourSubscriptionID=$(az account show --query id -o tsv)
44 | azd config set defaults.subscription $yourSubscriptionID
45 | ```
46 |
47 | 1. Run the following command:
48 |
49 | ```bash
50 | azd init -t azure-search-openai-demo
51 | ```
52 |
53 | * For the target location, the regions that currently support the models used in this sample are **East US** or **South Central US**. For an up-to-date list of regions and models, check [here](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/concepts/models)
54 | * when prompted with `? Please enter a new environment name:`, enter `dev`
55 |
56 | > **NOTE**
57 | >
58 | > If you would like re-use your existing Azure OpenAI service resource:
59 | >
60 | > Run `azd env set AZURE_OPENAI_SERVICE {Name of existing Azure OpenAI service}`
61 | >
62 |
63 | 1. Execute the following command:
64 |
65 | ```bash
66 | azd up
67 | ```
68 |
69 | * This will provision Azure resources and deploy this sample to those resources, including building the search index based on the files found in the ./data folder.
70 | * After the application has been successfully deployed you will see a URL printed to the console. Click that URL to interact with the application in your browser.
71 |
72 | * It will look like the following:
73 | 
74 |
75 | > NOTE: It may take a minute for the application to be fully deployed. If you see a "Python Developer" welcome screen, then wait a minute and refresh the page.
76 |
77 | Once in the web app:
78 |
79 | * Try different topics in chat or Q&A context. For chat, try follow up questions, clarifications, ask to simplify or elaborate on answer, etc.
80 | * Explore citations and sources
81 | * Click on "settings" to try different options, tweak prompts, etc.
82 |
--------------------------------------------------------------------------------
/docs/modules/Module2/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Module 2 - Azure OpenAI and Cognitive Search
3 |
4 | has_children: true
5 |
6 | ---
7 |
8 | ## Module 2 - Azure OpenAI and Cognitive Search
9 |
10 |
--------------------------------------------------------------------------------
/docs/modules/Module2/intro.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Overview
3 | parent: Module 2 - Azure OpenAI and Cognitive Search
4 | has_children: false
5 | nav_order: 1
6 | ---
7 |
8 | # Azure OpenAI and Cognitive Search
9 |
10 | This sample demonstrates a few approaches for creating ChatGPT-like experiences over your own data using the Retrieval Augmented Generation pattern. It uses Azure OpenAI Service to access the ChatGPT model (gpt-35-turbo), and Azure Cognitive Search for data indexing and retrieval.
11 |
12 | In this sample application we use a fictitious company called Contoso Electronics, and the experience allows its employees to ask questions about the benefits, internal policies, as well as job descriptions and roles.
13 |
14 | 
15 |
16 | ## Features
17 |
18 | * Chat and Q&A interfaces
19 | * Explores various options to help users evaluate the trustworthiness of responses with citations, tracking of source content, etc.
20 | * Shows possible approaches for data preparation, prompt construction, and orchestration of interaction between model (ChatGPT) and retriever (Cognitive Search)
21 | * Settings directly in the UX to tweak the behavior and experiment with options
22 |
23 | 
24 |
25 | ## Workflow
26 |
27 | In this exercise, we deploy following resources in a single resource group.
28 |
29 | 
30 |
31 | ## How this works
32 |
33 | The application has mainly three parts:
34 |
35 | 1. Prep-docs
36 | 2. Front-end
37 | 3. Back-end
38 |
39 | ### Prep-docs
40 |
41 | As part of the deployment, we will upload the documents in the `/data` folder to the storage account and split the documents into individual pages. Basically, in the prep-doc step, we will process the PDF files, extract text and tables, split the content into sections (1000 characters), and upload the sections to an Azure search index for searching and retrieval.
42 |
43 | ### Front-end
44 |
45 | Node.js and TypeScript are the main tools used for building the frontend web application. The main methods being used in the frontend application to communicate with a backend API for question-asking and chat-like conversations are:
46 |
47 | - `askApi`: Takes an AskRequest object, sends a POST request to the `/ask` endpoint, and returns an AskResponse object. It throws an error if the response is not OK.
48 | - `chatApi`: Takes a ChatRequest object, sends a POST request to the `/chat` endpoint, and returns an AskResponse object. It throws an error if the response is not OK.
49 | - `getCitationFilePath`: Takes a citation string and returns the file path by concatenating `/content/` with the citation.
50 |
51 | ### Back-end
52 |
53 | The back-end is a python flask application that hosts the `/ask` and `/chat` endpoints. The back-end establishes a connection to the Azure OpenAI resource to send a question to an OpenAI agent and retrieve the answer. The major functionalities discussed in this exercise are chat features and question-answer features. For the QA and chat, the following approaches are experimented within this Lab:
54 | #### Ask Approaches:
55 | > Note, most applications will use a single one of these patterns or some derivative. Several were included in this exercise to allow exploration of the different approaches.
56 |
57 | * **_ReadDecomposeAsk_** - This approach breaks down a given question into smaller tasks and searches for relevant information to answer the question. It uses a series of search and lookup actions, along with an AI agent, to find the answer. The agent iteratively refines the search, examines the search results, and performs lookups as needed. It uses the ReAct agent to perform these actions.
58 |
59 | * **_ReadRetrieveReadApproach_** - This approach first reads the question and then retrieves relevant documents from a document store. After that, the AI agent reads these documents to find the answer. This approach is particularly suitable when there's a need to search through large document collections and extract relevant information to answer the question.
60 |
61 | * **_RetrieveThenReadApproach_** - This approach retrieves relevant documents first, then reads the documents to answer the question. Unlike the ReadRetrieveReadApproach, which reads the question first, this approach starts with retrieval, followed by reading the retrieved documents to find the answer. This can be useful when the retrieval process is more important or computationally expensive than understanding the question.
62 |
63 | These three approaches work together by providing different ways to tackle question-answering tasks based on the specific requirements of the task or the dataset being used. While they follow different methodologies, they all focus on retrieving relevant information and using OpenAI agents to process that information to generate a suitable answer. In this lab, you could choose the most appropriate approach for a specific question or dataset to improve the overall performance and efficiency of the question-answering system in the developer settings.
64 |
65 | #### Chat Approaches:
66 | * **_ChatReadRetrieveRead_** - This is an implementation of an information retrieval and question-answering approach using Azure Cognitive Search and the OpenAI API. The class has a specific structure and methods to perform the following steps:
67 |
68 | 1. Generate an optimized keyword search query based on the chat history and the last question asked by the user.
69 | 2. Retrieve relevant documents from the search index using the generated query.
70 | 3. Generate a contextual and content-specific answer using the search results and chat history.
71 |
72 |
73 |
74 |
75 |
--------------------------------------------------------------------------------
/docs/modules/Module3/bookcreatorflow.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Book Creator detailed flow
3 | parent: Module 3 - Semantic Kernel
4 | has_children: false
5 | nav_order: 3
6 | ---
7 |
8 | ## Module 3: Book Creator application detailed flow
9 |
10 | The following diagrams illustrates how skills/functions are defined and the interactions of the Book creator Client app and the Semantic Kernel API service exposed via the dotnet http endpoint.
11 |
12 | ### How are semantic skills defined?
13 |
14 | 
15 |
16 | ### Sequence diagram - Book creator -> SK Service
17 |
18 | The Semantic Kernel function API exposes 3 POST endpoints
19 |
20 | - /skills/{skillName}/invoke/{functionName}
21 | - /planner/createPlan
22 | - /planner/execute/{maxSteps}
23 |
24 | 
25 |
26 | 1. The childrensBookSkill is a predefined skill with a templated prompt that can be found in the samples/skills folder; a skill may have one to many sub-skills or functions.
27 | 2. The planner skill is also a specialized skill that can leverage additional skills, embeddings and connectors.
28 | 3. The planner execute API uses the templated values of the previous steps from the planner to fulfill the user's Ask.
29 |
--------------------------------------------------------------------------------
/docs/modules/Module3/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Module 3 - Semantic Kernel
3 |
4 | has_children: true
5 |
6 | ---
7 |
8 |
9 | ## Module 3 - Semantic Kernel
10 |
11 |
--------------------------------------------------------------------------------
/docs/modules/Module3/intro.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Overview
3 | parent: Module 3 - Semantic Kernel
4 | has_children: false
5 | nav_order: 1
6 | ---
7 |
8 | ## Module 3 - Semantic Kernel
9 |
10 | ## What is Semantic Kernel?
11 |
12 | Semantic Kernel (SK) is an Open source, Lightweight SDK that allows you to easily integrate Large Language Models (LLMs) like ChatGPT into your application.
13 |
14 | Semantic Kernel provides the developer with controls over AI LLM constructs such as prompts, embeddings and allows you to provide rich context to your application specific use cases that you would otherwise not have by just using OpenAI/Azure OpenAI APIs.
15 |
16 | ## Why is Semantic Kernel needed?
17 |
18 | 
19 |
20 | ## Semantic Kernel - Use Cases
21 |
22 | 
23 |
24 | ## Semantic Kernel - Pipeline
25 |
26 | 
27 |
28 |
29 | ## Semantic Kernel - building blocks
30 |
31 | - Planner - The planner is responsible for fulfilling a specific user ASK, the planner has access to a pre-defined library of pre-made skills and can also call upon memories to best situate the ASK's context and connectors to call APIs and to leverage other external capabilities if needed.
32 |
33 | - Memories – Also referred to as Embeddings; they provide the ability to supply specific context to an ASK or Prompt.
34 |
35 | - Skills – Domain expertise exposed as a function, an ASK may use one or many skills/functions to accomplish a goal.
36 |
37 | - Connectors – Ability to integrate with external, realtime APIs to enrich the response or perform an action.
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/docs/modules/Module3/lab-module3.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Lab
3 | parent: Module 3 - Semantic Kernel
4 | has_children: false
5 | nav_order: 2
6 | ---
7 |
8 | ## Module 3: Lab - Book Creator application
9 |
10 | The Book creator sample application shows how SK uses `planner`, `skills` and `memories` to create a children's book. The `Planner` creates a plan for the skills (functions) that are pre-defined for generating a book topic and creating a book. The `Writer` pre-defined skills are chained together based on the user ask to create the book.
11 |
12 | Requirements:
13 |
14 | - You will need an Open AI API Key or Azure Open AI service key to get started.
15 | - [Azure Functions Core Tools](https://learn.microsoft.com/en-us/azure/azure-functions/functions-run-local?tabs=v4%2Cwindows%2Ccsharp%2Cportal%2Cbash) are required to run the kernel as a local web service, this service is used by the sample web app.
16 | - [.NET 6](https://dotnet.microsoft.com/en-us/download/dotnet/6.0). If you have .NET 7 installed, Azure Function Tools will still require .NET 6, so we suggest installing both.
17 | - [Yarn](https://yarnpkg.com/getting-started/install) is used for installing web apps' dependencies.
18 |
19 |
20 | ## Running the Book Creator sample app
21 |
22 | **Note**: to run the sample in VS Code Desktop using Dev Containers from Github Codespaces:
23 | Click hamburger menu on the left side of the VS Code window in Codespaces and select **Open in VS Code Desktop**
24 |
25 | 1. Clone the `semantic kernel` repo
26 |
27 | ```bash
28 | git clone https://github.com/microsoft/semantic-kernel.git
29 | ```
30 |
31 | 2. Start the `KernelHttpServer`. This is the backend API server used by the front end React app and is the intermediary between the front end client application and AzureOpenAI
32 |
33 | ```bash
34 | cd semantic-kernel/samples/dotnet/KernelHttpServer
35 | ```
36 |
37 | 3. **Run**
38 |
39 | ```bash
40 | func start --csharp
41 | ```
42 |
43 | This will run the service API locally at `http://localhost:7071`.
44 |
45 |
46 |
47 |
48 | 4. In another terminal window, start the `BookCreator` web application
49 |
50 | ```bash
51 | cd semantic-kernel/samples/apps/book-creator-webapp-react/
52 | ```
53 |
54 | 5. Rename the `semantic-kernel/samples/apps/book-creator-webapp-react/.env.example` file to `semantic-kernel/samples/apps/book-creator-webapp-react/.env`.
55 |
56 | 6. Start the book creator app by running the following commands
57 |
58 | ```bash
59 | yarn install
60 | yarn start
61 | ```
62 |
63 | 7. A browser will automatically open, otherwise you can navigate to `http://localhost:3000` to access the application.
64 |
65 | 8. On the application main page, enter your AzureOpenAI key and also a value for the model ID as shown below, you may use the default **text-davinci-003** and click **Save** button
66 |
67 | 
68 |
69 | 9. Follow the application prompts to enter a book idea and click the **get Ideas** button to see some sample suggestions; the screenshot should be similar to below
70 |
71 | 
72 |
73 | 10. Select an idea option and click on the **create book** button to see the newly created book. Note: you need to additionally click on the play button as shown below on the next screen to get the book contents.
74 |
75 | 
76 |
77 | The final result of the book
78 |
79 | 
80 |
81 |
82 |
--------------------------------------------------------------------------------
/tools/deploy/module0/aad-federated-cred.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Create an Azure AD application with a federated identity credential so that
# GitHub Actions (OIDC) can deploy to Azure without storing secrets.
#
# Usage: ./aad-federated-cred.sh <github_username> [resource_group_name]
#   github_username      required; substituted into credential.json's "subject"
#   resource_group_name  optional; defaults to openai-levelup-rg

# Require the GitHub username as the first argument.
github_username="$1"
if [ -z "$github_username" ]; then
  echo "Usage: $0 github_username [resourceGroupName]"
  exit 1
fi

# Optional second argument: target resource group for the role assignment.
resourceGroupName="$2"
if [ -z "$resourceGroupName" ]; then
  resourceGroupName=openai-levelup-rg
fi

# Patch the credential template in place with the caller's GitHub username.
# NOTE: this permanently modifies credential.json; re-running with a different
# username requires restoring the "your-github-username" placeholder first.
sed -i "s/your-github-username/$github_username/g" tools/deploy/module0/credential.json

# Create an Azure AD application with a random, collision-resistant display name.
uniqueAppName=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c10 ; echo '')
appId=$(az ad app create --display-name "$uniqueAppName" --query appId --output tsv)
if [ -z "$appId" ]; then
  echo "Failed to create Azure AD application" >&2
  exit 1
fi
echo "appId is $appId"

# Create a service principal for the Azure AD app.
assigneeObjectId=$(az ad sp create --id "$appId" --query id --output tsv)

# Grant the service principal Contributor on the target resource group only
# (least privilege: scoped to the resource group, not the subscription).
subscriptionId=$(az account show --query id --output tsv)
az role assignment create --role Contributor --subscription "$subscriptionId" --assignee-object-id "$assigneeObjectId" --assignee-principal-type ServicePrincipal --scope "/subscriptions/$subscriptionId/resourceGroups/$resourceGroupName"

# Attach the federated identity credential (GitHub OIDC trust) to the app.
az ad app federated-credential create --id "$appId" --parameters tools/deploy/module0/credential.json
--------------------------------------------------------------------------------
/tools/deploy/module0/credential.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "aoai-levelup-oidc",
3 | "issuer": "https://token.actions.githubusercontent.com",
4 | "subject": "repo:your-github-username/azure-openai-api-levelup:ref:refs/heads/main",
5 | "audiences": ["api://AzureADTokenExchange"],
6 | "description": "azure-openai-api-levelup repo"
7 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 |
4 | # Azure Functions localsettings file
5 | local.settings.json
6 |
7 | # User-specific files
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Build results
17 | [Dd]ebug/
18 | [Dd]ebugPublic/
19 | [Rr]elease/
20 | [Rr]eleases/
21 | x64/
22 | x86/
23 | bld/
24 | [Bb]in/
25 | [Oo]bj/
26 | [Ll]og/
27 |
28 | # Visual Studio 2015 cache/options directory
29 | .vs/
30 | # Uncomment if you have tasks that create the project's static files in wwwroot
31 | #wwwroot/
32 |
33 | # MSTest test Results
34 | [Tt]est[Rr]esult*/
35 | [Bb]uild[Ll]og.*
36 |
37 | # NUNIT
38 | *.VisualState.xml
39 | TestResult.xml
40 |
41 | # Build Results of an ATL Project
42 | [Dd]ebugPS/
43 | [Rr]eleasePS/
44 | dlldata.c
45 |
46 | # DNX
47 | project.lock.json
48 | project.fragment.lock.json
49 | artifacts/
50 |
51 | *_i.c
52 | *_p.c
53 | *_i.h
54 | *.ilk
55 | *.meta
56 | *.obj
57 | *.pch
58 | *.pdb
59 | *.pgc
60 | *.pgd
61 | *.rsp
62 | *.sbr
63 | *.tlb
64 | *.tli
65 | *.tlh
66 | *.tmp
67 | *.tmp_proj
68 | *.log
69 | *.vspscc
70 | *.vssscc
71 | .builds
72 | *.pidb
73 | *.svclog
74 | *.scc
75 |
76 | # Chutzpah Test files
77 | _Chutzpah*
78 |
79 | # Visual C++ cache files
80 | ipch/
81 | *.aps
82 | *.ncb
83 | *.opendb
84 | *.opensdf
85 | *.sdf
86 | *.cachefile
87 | *.VC.db
88 | *.VC.VC.opendb
89 |
90 | # Visual Studio profiler
91 | *.psess
92 | *.vsp
93 | *.vspx
94 | *.sap
95 |
96 | # TFS 2012 Local Workspace
97 | $tf/
98 |
99 | # Guidance Automation Toolkit
100 | *.gpState
101 |
102 | # ReSharper is a .NET coding add-in
103 | _ReSharper*/
104 | *.[Rr]e[Ss]harper
105 | *.DotSettings.user
106 |
107 | # JustCode is a .NET coding add-in
108 | .JustCode
109 |
110 | # TeamCity is a build add-in
111 | _TeamCity*
112 |
113 | # DotCover is a Code Coverage Tool
114 | *.dotCover
115 |
116 | # NCrunch
117 | _NCrunch_*
118 | .*crunch*.local.xml
119 | nCrunchTemp_*
120 |
121 | # MightyMoose
122 | *.mm.*
123 | AutoTest.Net/
124 |
125 | # Web workbench (sass)
126 | .sass-cache/
127 |
128 | # Installshield output folder
129 | [Ee]xpress/
130 |
131 | # DocProject is a documentation generator add-in
132 | DocProject/buildhelp/
133 | DocProject/Help/*.HxT
134 | DocProject/Help/*.HxC
135 | DocProject/Help/*.hhc
136 | DocProject/Help/*.hhk
137 | DocProject/Help/*.hhp
138 | DocProject/Help/Html2
139 | DocProject/Help/html
140 |
141 | # Click-Once directory
142 | publish/
143 |
144 | # Publish Web Output
145 | *.[Pp]ublish.xml
146 | *.azurePubxml
147 | # TODO: Comment the next line if you want to checkin your web deploy settings
148 | # but database connection strings (with potential passwords) will be unencrypted
149 | #*.pubxml
150 | *.publishproj
151 |
152 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
153 | # checkin your Azure Web App publish settings, but sensitive information contained
154 | # in these scripts will be unencrypted
155 | PublishScripts/
156 |
157 | # NuGet Packages
158 | *.nupkg
159 | # The packages folder can be ignored because of Package Restore
160 | **/packages/*
161 | # except build/, which is used as an MSBuild target.
162 | !**/packages/build/
163 | # Uncomment if necessary however generally it will be regenerated when needed
164 | #!**/packages/repositories.config
165 | # NuGet v3's project.json files produces more ignoreable files
166 | *.nuget.props
167 | *.nuget.targets
168 |
169 | # Microsoft Azure Build Output
170 | csx/
171 | *.build.csdef
172 |
173 | # Microsoft Azure Emulator
174 | ecf/
175 | rcf/
176 |
177 | # Windows Store app package directories and files
178 | AppPackages/
179 | BundleArtifacts/
180 | Package.StoreAssociation.xml
181 | _pkginfo.txt
182 |
183 | # Visual Studio cache files
184 | # files ending in .cache can be ignored
185 | *.[Cc]ache
186 | # but keep track of directories ending in .cache
187 | !*.[Cc]ache/
188 |
189 | # Others
190 | ClientBin/
191 | ~$*
192 | *~
193 | *.dbmdl
194 | *.dbproj.schemaview
195 | *.jfm
196 | *.pfx
197 | *.publishsettings
198 | node_modules/
199 | orleans.codegen.cs
200 |
201 | # Since there are multiple workflows, uncomment next line to ignore bower_components
202 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
203 | #bower_components/
204 |
205 | # RIA/Silverlight projects
206 | Generated_Code/
207 |
208 | # Backup & report files from converting an old project file
209 | # to a newer Visual Studio version. Backup files are not needed,
210 | # because we have git ;-)
211 | _UpgradeReport_Files/
212 | Backup*/
213 | UpgradeLog*.XML
214 | UpgradeLog*.htm
215 |
216 | # SQL Server files
217 | *.mdf
218 | *.ldf
219 |
220 | # Business Intelligence projects
221 | *.rdl.data
222 | *.bim.layout
223 | *.bim_*.settings
224 |
225 | # Microsoft Fakes
226 | FakesAssemblies/
227 |
228 | # GhostDoc plugin setting file
229 | *.GhostDoc.xml
230 |
231 | # Node.js Tools for Visual Studio
232 | .ntvs_analysis.dat
233 |
234 | # Visual Studio 6 build log
235 | *.plg
236 |
237 | # Visual Studio 6 workspace options file
238 | *.opt
239 |
240 | # Visual Studio LightSwitch build output
241 | **/*.HTMLClient/GeneratedArtifacts
242 | **/*.DesktopClient/GeneratedArtifacts
243 | **/*.DesktopClient/ModelManifest.xml
244 | **/*.Server/GeneratedArtifacts
245 | **/*.Server/ModelManifest.xml
246 | _Pvt_Extensions
247 |
248 | # Paket dependency manager
249 | .paket/paket.exe
250 | paket-files/
251 |
252 | # FAKE - F# Make
253 | .fake/
254 |
255 | # JetBrains Rider
256 | .idea/
257 | *.sln.iml
258 |
259 | # CodeRush
260 | .cr/
261 |
262 | # Python Tools for Visual Studio (PTVS)
263 | __pycache__/
264 | *.pyc
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/Properties/ServiceDependencies/xact-classifier-openai - Zip Deploy/appInsights1.arm.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://schema.management.azure.com/schemas/2018-05-01/subscriptionDeploymentTemplate.json#",
3 | "contentVersion": "1.0.0.0",
4 | "parameters": {
5 | "resourceGroupName": {
6 | "type": "string",
7 | "defaultValue": "xact-classifier-rg",
8 | "metadata": {
9 | "_parameterType": "resourceGroup",
10 | "description": "Name of the resource group for the resource. It is recommended to put resources under same resource group for better tracking."
11 | }
12 | },
13 | "resourceGroupLocation": {
14 | "type": "string",
15 | "defaultValue": "eastus",
16 | "metadata": {
17 | "_parameterType": "location",
18 | "description": "Location of the resource group. Resource groups could have different location than resources."
19 | }
20 | },
21 | "resourceLocation": {
22 | "type": "string",
23 | "defaultValue": "[parameters('resourceGroupLocation')]",
24 | "metadata": {
25 | "_parameterType": "location",
26 | "description": "Location of the resource. By default use resource group's location, unless the resource provider is not supported there."
27 | }
28 | }
29 | },
30 | "resources": [
31 | {
32 | "type": "Microsoft.Resources/resourceGroups",
33 | "name": "[parameters('resourceGroupName')]",
34 | "location": "[parameters('resourceGroupLocation')]",
35 | "apiVersion": "2019-10-01"
36 | },
37 | {
38 | "type": "Microsoft.Resources/deployments",
39 | "name": "[concat(parameters('resourceGroupName'), 'Deployment', uniqueString(concat('xact-classifier-openai', subscription().subscriptionId)))]",
40 | "resourceGroup": "[parameters('resourceGroupName')]",
41 | "apiVersion": "2019-10-01",
42 | "dependsOn": [
43 | "[parameters('resourceGroupName')]"
44 | ],
45 | "properties": {
46 | "mode": "Incremental",
47 | "template": {
48 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
49 | "contentVersion": "1.0.0.0",
50 | "resources": [
51 | {
52 | "name": "xact-classifier-openai",
53 | "type": "microsoft.insights/components",
54 | "location": "[parameters('resourceLocation')]",
55 | "kind": "web",
56 | "properties": {},
57 | "apiVersion": "2015-05-01"
58 | }
59 | ]
60 | }
61 | }
62 | }
63 | ],
64 | "metadata": {
65 | "_dependencyType": "appInsights.azure"
66 | }
67 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/Properties/ServiceDependencies/xact-classifier-openai - Zip Deploy/storage1.arm.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://schema.management.azure.com/schemas/2018-05-01/subscriptionDeploymentTemplate.json#",
3 | "contentVersion": "1.0.0.0",
4 | "parameters": {
5 | "resourceGroupName": {
6 | "type": "string",
7 | "defaultValue": "xact-classifier-rg",
8 | "metadata": {
9 | "_parameterType": "resourceGroup",
10 | "description": "Name of the resource group for the resource. It is recommended to put resources under same resource group for better tracking."
11 | }
12 | },
13 | "resourceGroupLocation": {
14 | "type": "string",
15 | "defaultValue": "eastus",
16 | "metadata": {
17 | "_parameterType": "location",
18 | "description": "Location of the resource group. Resource groups could have different location than resources."
19 | }
20 | },
21 | "resourceLocation": {
22 | "type": "string",
23 | "defaultValue": "[parameters('resourceGroupLocation')]",
24 | "metadata": {
25 | "_parameterType": "location",
26 | "description": "Location of the resource. By default use resource group's location, unless the resource provider is not supported there."
27 | }
28 | }
29 | },
30 | "resources": [
31 | {
32 | "type": "Microsoft.Resources/resourceGroups",
33 | "name": "[parameters('resourceGroupName')]",
34 | "location": "[parameters('resourceGroupLocation')]",
35 | "apiVersion": "2019-10-01"
36 | },
37 | {
38 | "type": "Microsoft.Resources/deployments",
39 | "name": "[concat(parameters('resourceGroupName'), 'Deployment', uniqueString(concat('xactclassifierrg8f36', subscription().subscriptionId)))]",
40 | "resourceGroup": "[parameters('resourceGroupName')]",
41 | "apiVersion": "2019-10-01",
42 | "dependsOn": [
43 | "[parameters('resourceGroupName')]"
44 | ],
45 | "properties": {
46 | "mode": "Incremental",
47 | "template": {
48 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
49 | "contentVersion": "1.0.0.0",
50 | "resources": [
51 | {
52 | "sku": {
53 | "name": "Standard_LRS",
54 | "tier": "Standard"
55 | },
56 | "kind": "Storage",
57 | "name": "xactclassifierrg8f36",
58 | "type": "Microsoft.Storage/storageAccounts",
59 | "location": "[parameters('resourceLocation')]",
60 | "apiVersion": "2017-10-01"
61 | }
62 | ]
63 | }
64 | }
65 | }
66 | ],
67 | "metadata": {
68 | "_dependencyType": "storage.azure"
69 | }
70 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/Properties/launchSettings.json:
--------------------------------------------------------------------------------
1 | {
2 | "profiles": {
3 | "FunctionApp1": {
4 | "commandName": "Project",
5 | "environmentVariables": {
6 |         "OPENAI_API_KEY": "<REDACTED-ROTATE-THIS-KEY-AND-LOAD-FROM-USER-SECRETS>",
7 | "OPENAI_API_BASE": "https://openai-demo-ahmedbham.openai.azure.com/",
8 |         "STORAGE_ACCOUNT_CONNECTION_STRING": "DefaultEndpointsProtocol=https;AccountName=<your-storage-account>;AccountKey=<REDACTED-ROTATE-THIS-KEY-AND-LOAD-FROM-USER-SECRETS>;EndpointSuffix=core.windows.net",
9 | "OPENAI_API_MODEL": "text-davinci-003"
10 | }
11 | }
12 | }
13 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/Properties/serviceDependencies.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "appInsights1": {
4 | "type": "appInsights",
5 | "connectionId": "APPLICATIONINSIGHTS_CONNECTION_STRING"
6 | },
7 | "storage1": {
8 | "type": "storage",
9 | "connectionId": "AzureWebJobsStorage"
10 | }
11 | }
12 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/Properties/serviceDependencies.local.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "appInsights1": {
4 | "type": "appInsights.sdk"
5 | },
6 | "storage1": {
7 | "type": "storage.emulator",
8 | "connectionId": "AzureWebJobsStorage"
9 | }
10 | }
11 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/Properties/serviceDependencies.xact-classifier-openai - Zip Deploy.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "appInsights1": {
4 | "resourceId": "/subscriptions/[parameters('subscriptionId')]/resourceGroups/[parameters('resourceGroupName')]/providers/microsoft.insights/components/xact-classifier-openai",
5 | "type": "appInsights.azure",
6 | "connectionId": "APPLICATIONINSIGHTS_CONNECTION_STRING"
7 | },
8 | "storage1": {
9 | "resourceId": "/subscriptions/[parameters('subscriptionId')]/resourceGroups/[parameters('resourceGroupName')]/providers/Microsoft.Storage/storageAccounts/xactclassifierrg8f36",
10 | "type": "storage.azure",
11 | "connectionId": "AzureWebJobsStorage"
12 | }
13 | }
14 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/TransactionClassifier.cs:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahmedbham/azure-openai-api-levelup/dfe9cc488703d3547f88da47d5918b1407e3c8f7/tools/deploy/module1/TransactionClassifier/TransactionClassifier.cs
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/TransactionClassifier.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 | net6.0
4 | v4
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | PreserveNewest
16 |
17 |
18 | PreserveNewest
19 | Never
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/tools/deploy/module1/TransactionClassifier/host.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "2.0",
3 | "logging": {
4 | "applicationInsights": {
5 | "samplingSettings": {
6 | "isEnabled": true,
7 | "excludedTypes": "Request"
8 | }
9 | }
10 | }
11 | }
--------------------------------------------------------------------------------
/tools/deploy/module1/data/25000_spend_dataset_current_25.csv:
--------------------------------------------------------------------------------
1 | Date,Supplier,Description,Transaction_value,classification
2 | 21/04/2016,M & J Ballantyne Ltd,George IV Bridge Work,35098,
3 | 26/04/2016,Private Sale,Literary & Archival Items,30000,
4 | 30/04/2016,City Of Edinburgh Council,Non Domestic Rates ,40800,
5 | 9/5/2016,Computacenter Uk,Kelvin Hall,72835,
6 | 9/5/2016,John Graham Construction Ltd,Causewayside Refurbishment,64361,
7 | 9/5/2016,A McGillivray,Causewayside Refurbishment,53690,
8 | 16/05/2016,John Graham Construction Ltd,Causewayside Refurbishment,365344,
9 | 23/05/2016,Computacenter Uk,Kelvin Hall,26506,
10 | 23/05/2016,ECG Facilities Service,Facilities Management Charge,32777,
11 | 23/05/2016,ECG Facilities Service,Facilities Management Charge,32777,
12 | 30/05/2016,ALDL,ALDL Charges,32317,
13 | 10/6/2016,Wavetek Ltd,Kelvin Hall,87589,
14 | 10/6/2016,John Graham Construction Ltd,Causewayside Refurbishment,381803,
15 | 28/06/2016,ECG Facilities Service,Facilities Management Charge,32832,
16 | 30/06/2016,Glasgow City Council,Kelvin Hall,1700000,
17 | 11/7/2016,Wavetek Ltd,Kelvin Hall,65692,
18 | 11/7/2016,John Graham Construction Ltd,Causewayside Refurbishment,139845,
19 | 15/07/2016,Sotheby'S,Literary & Archival Items,28500,
20 | 18/07/2016,Christies,Literary & Archival Items,33800,
21 | 25/07/2016,A McGillivray,Causewayside Refurbishment,30113,
22 | 31/07/2016,ALDL,ALDL Charges,32317,
23 | 8/8/2016,ECG Facilities Service,Facilities Management Charge,32795,
24 | 15/08/2016,Creative Video Productions Ltd,Kelvin Hall,26866,
25 | 15/08/2016,John Graham Construction Ltd,Causewayside Refurbishment,196807,
26 |
--------------------------------------------------------------------------------
/tools/deploy/module1/infra/module1-infra.bicep:
--------------------------------------------------------------------------------
// Declare parameters
@description('The unique name for the Azure Storage Account.')
param storageAccountName string = 'opai${uniqueString(resourceGroup().id)}'

@description('The unique name for the Azure Function App.')
param functionAppName string = 'xact-classifier-openai-${uniqueString(resourceGroup().id)}'

// Optional params
@description('The region to deploy the cluster. By default this will use the same region as the resource group.')
param location string = resourceGroup().location

// Declare variables
var storageAccountType = 'Standard_LRS'
var appServicePlanSku = 'S1'

// Create storage account
// Backs the Function App runtime (AzureWebJobsStorage / content share) and
// hosts the classification input/output containers declared below.
resource storageAccount 'Microsoft.Storage/storageAccounts@2022-09-01' = {
  name: storageAccountName
  location: location
  sku: {
    name: storageAccountType
  }
  kind: 'StorageV2'
}

// Default blob service of the storage account; parent scope for the containers.
resource blobService 'Microsoft.Storage/storageAccounts/blobServices@2022-09-01' = {
  name: 'default'
  parent: storageAccount
}


// Create storage containers
// NOTE(review): publicAccess 'Blob' grants anonymous, unauthenticated read
// access to every blob in these containers — confirm this exposure is
// intentional for the lab scenario before using outside a demo.
resource classificationContainer 'Microsoft.Storage/storageAccounts/blobServices/containers@2022-09-01' = {
  name: 'classification'
  parent: blobService

  properties: {
    publicAccess: 'Blob'
  }
}

resource outputContainer 'Microsoft.Storage/storageAccounts/blobServices/containers@2022-09-01' = {
  name: 'output'
  parent: blobService
  properties: {
    publicAccess: 'Blob'
  }
}

// Create app service plan (dedicated Standard S1 tier)
resource appServicePlan 'Microsoft.Web/serverfarms@2022-03-01' = {
  name: '${functionAppName}-plan'
  location: location
  sku: {
    name: appServicePlanSku
    tier: 'Standard'
  }
}

// Create function app
// NOTE(review): storage account keys are materialized into plain app settings
// via listKeys(); consider Key Vault references or managed identity instead.
resource functionApp 'Microsoft.Web/sites@2022-03-01' = {
  name: functionAppName
  location: location
  kind: 'functionapp'
  properties: {
    serverFarmId: appServicePlan.id
    siteConfig: {
      appSettings: [
        {
          // Connection string used by the Azure Functions host itself
          name: 'AzureWebJobsStorage'
          value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccountName};AccountKey=${storageAccount.listKeys().keys[0].value};EndpointSuffix=${environment().suffixes.storage}'
        }
        {
          // File-share backing for the function app's content
          name: 'WEBSITE_CONTENTAZUREFILECONNECTIONSTRING'
          value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccountName};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
        }
        {
          name: 'WEBSITE_CONTENTSHARE'
          value: functionAppName
        }
        {
          // Connection string consumed by the TransactionClassifier function code
          name: 'STORAGE_ACCOUNT_CONNECTION_STRING'
          value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccountName};EndpointSuffix=${environment().suffixes.storage};AccountKey=${storageAccount.listKeys().keys[0].value}'
        }
        {
          // Placeholder value: replaced with the real endpoint after deployment
          name: 'OPENAI_API_BASE'
          value: 'Enter your Azure OpenAI API Endpoint URL'
        }
        {
          // Placeholder value: replaced with the real key after deployment
          name: 'OPENAI_API_KEY'
          value: 'Enter your Azure OpenAI API Key'
        }
        {
          name: 'OPENAI_API_MODEL'
          value: 'text-davinci-003'
        }
        {
          name: 'FUNCTIONS_EXTENSION_VERSION'
          value: '~4'
        }
        {
          name: 'FUNCTIONS_WORKER_RUNTIME'
          value: 'dotnet'
        }
      ]
    }
  }
}
109 |
110 |
111 |
112 |
113 |
--------------------------------------------------------------------------------
/tools/deploy/module3/pom.xml:
--------------------------------------------------------------------------------
1 |
2 | 4.0.0
3 |
4 | com.github.octodemo
5 | bookstore-security
6 | ${revision}${changelist}${sha1}
7 | jar
8 |
9 | A Java example project to demonstrate a Java development stack with Maven, GitHub Actions, GitHub Package Registry and Azure.
10 |
11 |
12 | UTF-8
13 | UTF-8
14 |
15 | 11
16 |
17 |
20 | 10.0.0
21 | 2.17.2
22 |
23 |
26 | 1.0.0
27 |
28 | -SNAPSHOT
29 |
30 |
31 | msft-demo-resources/template-bookstore-security
32 |
33 |
34 |
35 |
36 |
37 | org.eclipse.jetty
38 | jetty-bom
39 | ${jetty.version}
40 | pom
41 | import
42 |
43 |
44 |
45 |
46 |
47 |
48 | org.eclipse.jetty
49 | jetty-server
50 |
51 |
52 |
53 | org.eclipse.jetty
54 | jetty-servlet
55 |
56 |
57 |
58 | org.thymeleaf
59 | thymeleaf
60 | 3.0.12.RELEASE
61 |
62 |
63 |
64 | org.json
65 | json
66 | 20210307
67 |
68 |
69 |
70 | org.xerial
71 | sqlite-jdbc
72 | 3.32.3.2
73 |
74 |
75 |
76 | org.apache.logging.log4j
77 | log4j-slf4j18-impl
78 | ${log4j.version}
79 |
80 |
81 |
82 | junit
83 | junit
84 | 4.13
85 | test
86 |
87 |
88 |
89 |
90 |
91 |
92 | src/main/resources
93 | true
94 |
95 |
96 |
97 |
98 | src/main/webapp
99 | false
100 |
101 |
102 |
103 |
104 |
105 |
106 | org.apache.maven.plugins
107 | maven-checkstyle-plugin
108 | 3.1.1
109 |
110 |
111 | com.puppycrawl.tools
112 | checkstyle
113 | 8.29
114 |
115 |
116 |
117 |
118 |
119 | org.apache.maven.plugins
120 | maven-compiler-plugin
121 | 3.8.1
122 |
123 |
124 | ${java.version}
125 | ${java.version}
126 |
127 |
128 |
129 |
130 | org.apache.maven.plugins
131 | maven-resources-plugin
132 | 3.1.0
133 |
134 |
135 |
136 | com.github.ekryd.echo-maven-plugin
137 | echo-maven-plugin
138 | 1.3.2
139 |
140 |
141 |
142 | org.jacoco
143 | jacoco-maven-plugin
144 | 0.8.6
145 |
146 |
147 |
148 | org.apache.maven.plugins
149 | maven-shade-plugin
150 | 3.2.4
151 |
152 |
153 |
154 |
155 |
156 |
157 | org.apache.maven.plugins
158 | maven-checkstyle-plugin
159 |
160 |
161 | verify-style
162 | verify
163 |
164 | check
165 |
166 |
167 | true
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 | org.apache.maven.plugins
184 | maven-shade-plugin
185 |
186 |
187 | package
188 |
189 | shade
190 |
191 |
192 | false
193 |
194 |
195 | com.github.demo.DemoServer
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 | coverage-per-test
208 |
209 |
210 |
211 | org.apache.maven.plugins
212 | maven-surefire-plugin
213 | 2.22.2
214 |
215 |
216 |
217 | listener
218 | org.sonar.java.jacoco.JUnitListener
219 |
220 |
221 |
222 |
223 |
224 |
225 | org.jacoco
226 | jacoco-maven-plugin
227 |
228 |
229 |
230 | prepare-jacoco
231 |
232 | prepare-agent
233 |
234 |
235 |
236 |
237 | jacoco-report
238 | test
239 |
240 | report
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 | org.sonarsource.java
251 | sonar-jacoco-listeners
252 | 3.8
253 | test
254 |
255 |
256 |
257 |
258 |
259 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/DemoServer.java:
--------------------------------------------------------------------------------
package com.github.demo;

import java.net.URL;
import java.net.URI;

import com.github.demo.servlet.*;

import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.resource.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Entry point that boots an embedded Jetty server hosting the bookstore demo.
 *
 * Configuration via environment variables:
 *   SERVER_PORT      - HTTP port to listen on (defaults to 8080)
 *   STATIC_RESOURCES - optional filesystem path for static assets; when unset
 *                      the classpath resources under "static/" are used
 */
public class DemoServer {

    private static final Logger logger = LoggerFactory.getLogger(DemoServer.class);

    public static void main(String[] args) throws Exception {
        String portString = System.getenv("SERVER_PORT");
        if (portString == null) {
            portString = "8080";
        }

        int port = Integer.parseInt(portString);
        Server server = new Server(port);

        URI webRootUri;
        String staticResources = System.getenv("STATIC_RESOURCES");

        if (staticResources != null) {
            logger.info("Using environment variable for static resources; {}", staticResources);
            webRootUri = URI.create("file://" + staticResources);
        } else {
            logger.info("Using embedded static resources");
            URL url = DemoServer.class.getClassLoader().getResource("static/");
            if (url == null) {
                // Fail fast with a clear error instead of the opaque
                // NullPointerException the unchecked url.toURI() would raise.
                throw new IllegalStateException(
                    "Embedded static resources 'static/' not found on the classpath");
            }
            webRootUri = url.toURI();
        }
        logger.info("Webserver static resources: {}", webRootUri);

        ServletContextHandler ctxHandler = new ServletContextHandler(ServletContextHandler.SESSIONS);
        ctxHandler.setContextPath("/");
        ctxHandler.setBaseResource(Resource.newResource(webRootUri));

        // Static assets are served from /static/* by Jetty's DefaultServlet
        ServletHolder staticFiles = new ServletHolder("static-home", DefaultServlet.class);
        staticFiles.setInitParameter("resourceBase", webRootUri.toString());
        staticFiles.setInitParameter("dirAllowed","true");
        staticFiles.setInitParameter("pathInfoOnly","true");
        ctxHandler.addServlet(staticFiles,"/static/*");

        ctxHandler.addServlet(StatusServlet.class, "/status");

        // Default servlet path, must be last
        ServletHolder books = new ServletHolder("/", BookServlet.class);
        books.setInitParameter("dirAllowed","false");
        books.setInitParameter("pathInfoOnly","true");
        ctxHandler.addServlet(books, "/");

        server.setHandler(ctxHandler);

        server.start();

        try {
            URI serverUri = server.getURI();
            if (serverUri != null) {
                logServerWithUri(serverUri);
            } else {
                logServerStartWithUnresolvedUri(port);
            }
        } catch (Exception e) {
            logger.error("Failure resolving URI: {}", e.getMessage(), e);
            logServerStartWithUnresolvedUri(port);
        }
        server.join();
    }

    /** Logs the startup banner when the server URI could not be resolved. */
    private static void logServerStartWithUnresolvedUri(int port) {
        logger.info("**********************************************************************************************************");
        logger.info("Started DemoServer, but server URI could not be determined, try accessing on http://localhost:{}", port);
        logger.info("**********************************************************************************************************");
    }

    /** Logs the startup banner with the resolved server URI. */
    private static void logServerWithUri(URI serverUri) {
        logger.info("**********************************************************************************************************");
        logger.info("Started DemoServer; available at: {}://localhost:{}", serverUri.getScheme(), serverUri.getPort());
        logger.info("**********************************************************************************************************");
    }
}
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/model/Book.java:
--------------------------------------------------------------------------------
1 | package com.github.demo.model;
2 |
3 | /**
4 | * Model class for book.
5 | */
6 | public class Book {
7 |
8 | private String title;
9 |
10 | private String author;
11 |
12 | private String cover;
13 |
14 | public Book() {
15 |
16 | }
17 |
18 | public Book(String author, String title) {
19 | this.author = author;
20 | this.title = title;
21 | }
22 |
23 | public Book(String author, String title, String cover) {
24 | this.author = author;
25 | this.title = title;
26 | this.cover = cover;
27 | }
28 |
29 | public String getTitle() {
30 | return title;
31 | }
32 |
33 | public void setTitle(String title) {
34 | this.title = title;
35 | }
36 |
37 | public String getAuthor() {
38 | return author;
39 | }
40 |
41 | public void setAuthor(String author) {
42 | this.author = author;
43 | }
44 |
45 | public String getDetails() {
46 | return author + " " + title;
47 | }
48 |
49 | public String getCover() {
50 | return cover;
51 | }
52 |
53 | public void setCover(String cover) {
54 | this.cover = cover;
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/service/BookDatabase.java:
--------------------------------------------------------------------------------
package com.github.demo.service;

import com.github.demo.model.Book;

import java.util.List;
import java.util.Collection;

/**
 * Abstraction over the persistence layer for books.
 *
 * Note: the generic type parameters restore type safety over the raw
 * List/Collection declarations (raw types defeat compile-time checking).
 */
public interface BookDatabase {

    /**
     * Retrieve every book stored in the database.
     *
     * @return all stored books
     * @throws BookServiceException when the database cannot be queried
     */
    List<Book> getAll() throws BookServiceException;

    /**
     * Find books whose title contains the given text.
     *
     * @param name substring to match against book titles
     * @return the matching books
     * @throws BookServiceException when the database cannot be queried
     */
    List<Book> getBooksByTitle(String name) throws BookServiceException;

    /**
     * Insert the supplied books into the database.
     *
     * @param books the books to add
     * @throws BookServiceException when the insert fails
     */
    void populate(Collection<Book> books) throws BookServiceException;

    /**
     * Release any underlying database resources.
     *
     * @throws BookServiceException when cleanup fails
     */
    void destroy() throws BookServiceException;
}
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/service/BookDatabaseImpl.java:
--------------------------------------------------------------------------------
package com.github.demo.service;

import com.github.demo.model.Book;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import java.util.Properties;
import java.util.Collection;
import java.util.List;
import java.util.ArrayList;

/**
 * JDBC-backed implementation of {@link BookDatabase}. When no connection URL
 * is supplied it falls back to an in-memory SQLite database, which is created
 * and seeded with sample data on construction.
 */
public class BookDatabaseImpl implements BookDatabase {

    public static final Logger logger = LoggerFactory.getLogger(BookDatabaseImpl.class);

    /** The maximum number of retries for connecting with the database. */
    private static final int MAX_CONNECTION_RETRIES = 10;

    /** The time in ms to backoff on a connection failure to the database. */
    private static final int RETRY_BACKOFF = 3000;

    /** The connection for the database; null when connecting failed. */
    private Connection connection;

    /**
     * Obtain the maximum number of database connection retries before giving up.
     * Reads the DATABASE_RETRIES environment variable when set.
     *
     * @return the number of times to attempt to reconnect to the database
     */
    private static final int getMaxRetries() {
        String retries = System.getenv("DATABASE_RETRIES");
        if (retries != null) {
            return Integer.parseInt(retries);
        } else {
            return MAX_CONNECTION_RETRIES;
        }
    }

    /** Create a database backed by an in-memory SQLite instance. */
    public BookDatabaseImpl() throws BookServiceException {
        this(null, null, null);
    }

    /**
     * Create a database connection.
     *
     * @param url      JDBC URL; when null an in-memory SQLite database is used
     * @param username optional database user
     * @param password optional database password
     * @throws BookServiceException when the in-memory database cannot be seeded
     */
    public BookDatabaseImpl(String url, String username, String password) throws BookServiceException {
        Properties props = new Properties();

        if (username != null) {
            props.setProperty("user", username);
        }
        if (password != null) {
            props.setProperty("password", password);
        }
        // This is a postgres specific setting, but SQLite tolerates it
        props.setProperty("ssl", "false");

        // Default to a sqlite in memory database if no database url has been provided
        if (url == null) {
            url = "jdbc:sqlite::memory:";
        }

        connection = getConnection(url, props);

        // Populate our in-memory database with data, if this is what we are using
        if (connection != null && url.indexOf(":memory:") > -1) {
            initializeAndPopulateDatabase();
        }
    }

    /** @return true when a database connection was successfully established */
    public boolean isValid() {
        return connection != null;
    }

    @Override
    public List<Book> getAll() throws BookServiceException {
        List<Book> books = new ArrayList<>();

        if (!isValid()) {
            throw new BookServiceException("Database connection is not valid, check logs for failure details.");
        }

        // try-with-resources closes the statement and result set even on
        // failure (the original leaked both).
        try (Statement stmt = connection.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT * FROM books")) {
            while (rs.next()) {
                books.add(new Book(
                    rs.getString("author"),
                    rs.getString("title"),
                    rs.getString("image")
                ));
            }
        } catch (SQLException e) {
            logger.error("Failed to obtain books", e);
            throw new BookServiceException(e);
        }
        return books;
    }

    @Override
    public List<Book> getBooksByTitle(String name) throws BookServiceException {
        List<Book> books = new ArrayList<>();

        if (!isValid()) {
            throw new BookServiceException("Database connection is not valid, check logs for failure details.");
        }

        // SECURITY FIX: the original concatenated the user-supplied title
        // directly into the SQL string, allowing SQL injection. A bound
        // parameter keeps the value out of the statement text entirely.
        String query = "SELECT * FROM books WHERE title LIKE ?";

        try (PreparedStatement stmt = connection.prepareStatement(query)) {
            stmt.setString(1, "%" + name + "%");

            try (ResultSet results = stmt.executeQuery()) {
                while (results.next()) {
                    books.add(new Book(
                        results.getString("author"),
                        results.getString("title"),
                        results.getString("image")
                    ));
                }
            }
        } catch (SQLException e) {
            logger.error("Failed while searching for '{}'", name, e);
            throw new BookServiceException(e);
        }
        return books;
    }

    @Override
    public void destroy() {
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            // Ignore: we are shutting down anyway
        } finally {
            // Always drop the reference (the original only nulled it on failure)
            connection = null;
        }
    }

    @Override
    public void populate(Collection<Book> books) throws BookServiceException {
        if (books != null && !books.isEmpty()) {
            // BUG FIX: the original statement declared four placeholders
            // (including `rating`) but only ever bound three, which fails at
            // execution time; insert only the columns we have data for.
            try (PreparedStatement ps = connection.prepareStatement(
                    "INSERT INTO books (title, author, image) VALUES(?, ?, ?)")) {
                for (Book book : books) {
                    logger.info("Adding book to database: {}", book.getTitle());
                    ps.setString(1, book.getTitle());
                    ps.setString(2, book.getAuthor());
                    ps.setString(3, book.getCover());
                    ps.execute();
                }
                logger.info("Database populated.");
            } catch (SQLException se) {
                logger.error("Failure when populating database", se);
                throw new BookServiceException(se);
            }
        }
    }

    /**
     * Initializes the internal database structure and populates it with our
     * default sample data.
     *
     * @throws BookServiceException when table creation or seeding fails
     */
    private void initializeAndPopulateDatabase() throws BookServiceException {
        // Initialize the database tables for in memory database.
        // BUG FIX: the original DDL read "rating, INTEGER" — a misplaced comma
        // that declared a typeless `rating` column plus a spurious `INTEGER`
        // column. try-with-resources also closes the statement on success
        // (the original only closed it on failure).
        try (Statement statement = connection.createStatement()) {
            statement.execute("CREATE TABLE IF NOT EXISTS books ("
                + "id INTEGER PRIMARY KEY, "
                + "title TEXT NOT NULL, "
                + "author TEXT, "
                + "image TEXT, "
                + "rating INTEGER"
                + ")"
            );
        } catch (SQLException e) {
            throw new BookServiceException(e);
        }
        // Populate the database with some sample data
        populate(BookUtils.getSampleBooks());
    }

    /**
     * Open a JDBC connection, retrying with a fixed backoff on failure.
     *
     * @param url   JDBC connection URL
     * @param props connection properties (user/password/ssl)
     * @return the open connection, or null when all retries were exhausted
     */
    private Connection getConnection(String url, Properties props) throws BookServiceException {
        Connection connection = null;
        int retryCount = 0;
        int maxRetries = getMaxRetries();

        logger.debug("Connecting to database: " + url);

        do {
            try {
                connection = DriverManager.getConnection(url, props);
            } catch (SQLException e) {
                retryCount++;

                logger.warn("Failed to connect to database, retries: " + retryCount);
                logger.warn(e.getMessage());

                try {
                    logger.info("Backing off before retrying database connection for " + RETRY_BACKOFF + "ms.");
                    Thread.sleep(RETRY_BACKOFF);
                } catch (InterruptedException e1) {
                    logger.error("Failed to sleep: " + e1.getMessage());
                }
            }
        }
        while (connection == null && retryCount < maxRetries);

        logger.info("Database Connection successful? " + (connection != null));
        return connection;
    }
}
253 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/service/BookService.java:
--------------------------------------------------------------------------------
package com.github.demo.service;

import com.github.demo.model.Book;

import java.util.List;

/**
 * Service facade over the book database. Connection settings are read from
 * the DATABASE_URL, DATABASE_USER and DATABASE_PASSWORD environment variables.
 */
public class BookService {

    private BookDatabase booksDatabase;

    /**
     * Connect to the configured database (or an in-memory fallback when
     * DATABASE_URL is unset).
     *
     * @throws BookServiceException when the database cannot be initialized
     */
    public BookService() throws BookServiceException {
        String databaseUrl = System.getenv("DATABASE_URL");
        String databaseUser = System.getenv("DATABASE_USER");
        String databasePassword = System.getenv("DATABASE_PASSWORD");

        // The constructor already throws BookServiceException with full
        // details; the original caught it only to re-wrap it in another
        // BookServiceException, adding a useless nesting level.
        booksDatabase = new BookDatabaseImpl(databaseUrl, databaseUser, databasePassword);
    }

    /**
     * @return all books in the database
     * @throws BookServiceException when the query fails
     */
    public List<Book> getBooks() throws BookServiceException {
        return this.booksDatabase.getAll();
    }

    /**
     * Search for books whose title contains the given text.
     *
     * @param name substring to search for in titles
     * @return the matching books
     * @throws BookServiceException when the query fails
     */
    public List<Book> searchBooks(String name) throws BookServiceException {
        return this.booksDatabase.getBooksByTitle(name);
    }
}
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/service/BookServiceException.java:
--------------------------------------------------------------------------------
1 | package com.github.demo.service;
2 |
3 | /**
4 | * Custom BookService Exception for caturing failures in building/starting the books service.
5 | */
6 | public class BookServiceException extends Exception {
7 |
8 | private static final long serialVersionUID = 1L;
9 |
10 | public BookServiceException(Exception e) {
11 | super(e);
12 | }
13 |
14 | public BookServiceException(String message) {
15 | super(message);
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/service/BookUtils.java:
--------------------------------------------------------------------------------
package com.github.demo.service;

import java.util.ArrayList;
import java.util.List;

import com.github.demo.model.Book;

/**
 * Helper supplying the default sample catalogue used to seed the in-memory
 * database.
 */
public class BookUtils {

    /**
     * Build the default sample data set. Generic types restore the compile-time
     * safety lost by the raw List/ArrayList declarations.
     *
     * @return a mutable list of six well-known software engineering books
     */
    public static List<Book> getSampleBooks() {
        List<Book> books = new ArrayList<>(6);

        books.add(new Book("Jeff Sutherland", "Scrum: The Art of Doing Twice the Work in Half the Time", "scrum.jpg"));
        books.add(new Book("Eric Ries", "The Lean Startup: How Constant Innovation Creates Radically Successful Businesses", "lean.jpg"));
        books.add(new Book("Geoffrey A. Moore", "Crossing the Chasm", "chasm.jpg"));
        books.add(new Book("David Thomas", "The Pragmatic Programmer: From Journeyman to Master", "pragmatic.jpg"));
        books.add(new Book("Frederick P. Brooks Jr.", "The Mythical Man-Month: Essays on Software Engineering", "month.jpg"));
        books.add(new Book("Steve Krug", "Don't Make Me Think, Revisited: A Common Sense Approach to Web Usability", "think.jpg"));

        return books;
    }
}
23 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/servlet/BookServlet.java:
--------------------------------------------------------------------------------
1 | package com.github.demo.servlet;
2 |
import java.io.IOException;
import java.io.InputStream;
import java.util.Calendar;
import java.util.List;
import java.util.Properties;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.github.demo.model.Book;
import com.github.demo.service.BookService;
import com.github.demo.service.BookServiceException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.context.WebContext;
import org.thymeleaf.templateresolver.ServletContextTemplateResolver;
21 |
22 |
23 | public class BookServlet extends HttpServlet {
24 |
25 | private static final long serialVersionUID = 1L;
26 |
27 | private static final Logger logger = LoggerFactory.getLogger(BookServlet.class);
28 |
29 | private BookService bookService;
30 |
31 | public BookServlet() throws Exception {
32 | logger.info("Starting Bookstore Servlet...");
33 | try {
34 | bookService = new BookService();
35 | } catch (BookServiceException e) {
36 | logger.error("Failed to instantiate BookService: " + e.getMessage());
37 | throw e;
38 | }
39 | }
40 |
41 | @Override
42 | public void doPost(HttpServletRequest req, HttpServletResponse resp)
43 | throws ServletException, IOException {
44 | doGet(req, resp);
45 | }
46 |
47 | @Override
48 | protected void doGet(HttpServletRequest req, HttpServletResponse resp)
49 | throws ServletException, IOException {
50 |
51 | Properties versionProperties = new Properties();
52 | versionProperties.load(getClass().getResourceAsStream("/version.properties"));
53 |
54 | ServletContextTemplateResolver resolver = new ServletContextTemplateResolver(req.getServletContext());
55 | resolver.setPrefix("/");
56 | resolver.setSuffix(".html");
57 |
58 | TemplateEngine engine = new TemplateEngine();
59 | engine.setTemplateResolver(resolver);
60 |
61 | WebContext ctx = new WebContext(req, resp, getServletContext(), req.getLocale());
62 | ctx.setVariable("modified", Calendar.getInstance());
63 | ctx.setVariable("version", versionProperties.getProperty("version"));
64 |
65 | resp.setContentType("text/html; charset=UTF-8");
66 |
67 | try {
68 | List books = bookService.getBooks();
69 | ctx.setVariable("books", books);
70 | engine.process("books", ctx, resp.getWriter());
71 | } catch (BookServiceException e) {
72 | ctx.setVariable("error", e.getMessage());
73 | engine.process("error", ctx, resp.getWriter());
74 | }
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/java/com/github/demo/servlet/StatusServlet.java:
--------------------------------------------------------------------------------
1 | package com.github.demo.servlet;
2 |
3 | import javax.servlet.ServletException;
4 | import javax.servlet.http.HttpServlet;
5 | import javax.servlet.http.HttpServletRequest;
6 | import javax.servlet.http.HttpServletResponse;
7 | import java.io.IOException;
8 |
9 | /**
10 | * A status page servlet that will report success if the application has been instantiated.
11 | * this provides a useful status check for containers and deployment purposes.
12 | */
13 | public class StatusServlet extends HttpServlet {
14 |
15 | private static final long serialVersionUID = 1L;
16 |
17 | public StatusServlet() {
18 | }
19 |
20 | protected void doGet(HttpServletRequest req, HttpServletResponse resp)
21 | throws ServletException, IOException {
22 |
23 | resp.setContentType("text/html; charset=UTF-8");
24 | resp.getWriter().write("ok");
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/resources/log4j2.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/resources/version.properties:
--------------------------------------------------------------------------------
1 | version=${project.version}
--------------------------------------------------------------------------------
/tools/deploy/module3/src/main/webapp/static/books.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
34 |
35 |
36 |
37 |
61 |
62 |