├── .github
│   ├── CODE_OF_CONDUCT.md
│   ├── ISSUE_TEMPLATE.md
│   └── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE.md
├── README.md
├── csv
│   ├── customer.fmt
│   ├── customer.tbl
│   ├── video_games.csv
│   └── video_games.csv.readme.txt
├── json
│   ├── json-generator.txt
│   ├── user1.json
│   ├── user2.json
│   ├── user3.json
│   └── users.json
└── scripts
    ├── .env.ps1.template
    ├── 00-on-prem-tools-user-setup.sql
    ├── 01-import-csv.ps1
    ├── 02-import-bcp.ps1
    └── 03-bulkinsert-openrowset.sql
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Microsoft Open Source Code of Conduct
2 |
3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4 |
5 | Resources:
6 |
7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
10 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
4 | > Please provide us with the following information:
5 | > ---------------------------------------------------------------
6 |
7 | ### This issue is for a: (mark with an `x`)
8 | ```
9 | - [ ] bug report -> please search issues before submitting
10 | - [ ] feature request
11 | - [ ] documentation issue or request
12 | - [ ] regression (a behavior that used to work and stopped in a new release)
13 | ```
14 |
15 | ### Minimal steps to reproduce
16 | >
17 |
18 | ### Any log messages given by the failure
19 | >
20 |
21 | ### Expected/desired behavior
22 | >
23 |
24 | ### OS and Version?
25 | > Windows 7, 8 or 10. Linux (which distribution). macOS (Yosemite? El Capitan? Sierra?)
26 |
27 | ### Versions
28 | >
29 |
30 | ### Mention any other details that might be useful
31 |
32 | > ---------------------------------------------------------------
33 | > Thanks! We'll be in touch soon.
34 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Purpose
2 |
3 | * ...
4 |
5 | ## Does this introduce a breaking change?
6 |
7 | ```
8 | [ ] Yes
9 | [ ] No
10 | ```
11 |
12 | ## Pull Request Type
13 | What kind of change does this Pull Request introduce?
14 |
15 |
16 | ```
17 | [ ] Bugfix
18 | [ ] Feature
19 | [ ] Code style update (formatting, local variables)
20 | [ ] Refactoring (no functional changes, no api changes)
21 | [ ] Documentation content changes
22 | [ ] Other... Please describe:
23 | ```
24 |
25 | ## How to Test
26 | * Get the code
27 |
28 | ```
29 | git clone [repo-address]
30 | cd [repo-name]
31 | git checkout [branch-name]
32 | npm install
33 | ```
34 |
35 | * Test the code
36 |
37 | ```
38 | ```
39 |
40 | ## What to Check
41 | Verify that the following are valid
42 | * ...
43 |
44 | ## Other Information
45 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Mono auto generated files
17 | mono_crash.*
18 |
19 | # Build results
20 | [Dd]ebug/
21 | [Dd]ebugPublic/
22 | [Rr]elease/
23 | [Rr]eleases/
24 | x64/
25 | x86/
26 | [Aa][Rr][Mm]/
27 | [Aa][Rr][Mm]64/
28 | bld/
29 | [Bb]in/
30 | [Oo]bj/
31 | [Ll]og/
32 | [Ll]ogs/
33 |
34 | # Visual Studio 2015/2017 cache/options directory
35 | .vs/
36 | # Uncomment if you have tasks that create the project's static files in wwwroot
37 | #wwwroot/
38 |
39 | # Visual Studio 2017 auto generated files
40 | Generated\ Files/
41 |
42 | # MSTest test Results
43 | [Tt]est[Rr]esult*/
44 | [Bb]uild[Ll]og.*
45 |
46 | # NUnit
47 | *.VisualState.xml
48 | TestResult.xml
49 | nunit-*.xml
50 |
51 | # Build Results of an ATL Project
52 | [Dd]ebugPS/
53 | [Rr]eleasePS/
54 | dlldata.c
55 |
56 | # Benchmark Results
57 | BenchmarkDotNet.Artifacts/
58 |
59 | # .NET Core
60 | project.lock.json
61 | project.fragment.lock.json
62 | artifacts/
63 |
64 | # StyleCop
65 | StyleCopReport.xml
66 |
67 | # Files built by Visual Studio
68 | *_i.c
69 | *_p.c
70 | *_h.h
71 | *.ilk
72 | *.meta
73 | *.obj
74 | *.iobj
75 | *.pch
76 | *.pdb
77 | *.ipdb
78 | *.pgc
79 | *.pgd
80 | *.rsp
81 | *.sbr
82 | *.tlb
83 | *.tli
84 | *.tlh
85 | *.tmp
86 | *.tmp_proj
87 | *_wpftmp.csproj
88 | *.log
89 | *.vspscc
90 | *.vssscc
91 | .builds
92 | *.pidb
93 | *.svclog
94 | *.scc
95 |
96 | # Chutzpah Test files
97 | _Chutzpah*
98 |
99 | # Visual C++ cache files
100 | ipch/
101 | *.aps
102 | *.ncb
103 | *.opendb
104 | *.opensdf
105 | *.sdf
106 | *.cachefile
107 | *.VC.db
108 | *.VC.VC.opendb
109 |
110 | # Visual Studio profiler
111 | *.psess
112 | *.vsp
113 | *.vspx
114 | *.sap
115 |
116 | # Visual Studio Trace Files
117 | *.e2e
118 |
119 | # TFS 2012 Local Workspace
120 | $tf/
121 |
122 | # Guidance Automation Toolkit
123 | *.gpState
124 |
125 | # ReSharper is a .NET coding add-in
126 | _ReSharper*/
127 | *.[Rr]e[Ss]harper
128 | *.DotSettings.user
129 |
130 | # TeamCity is a build add-in
131 | _TeamCity*
132 |
133 | # DotCover is a Code Coverage Tool
134 | *.dotCover
135 |
136 | # AxoCover is a Code Coverage Tool
137 | .axoCover/*
138 | !.axoCover/settings.json
139 |
140 | # Visual Studio code coverage results
141 | *.coverage
142 | *.coveragexml
143 |
144 | # NCrunch
145 | _NCrunch_*
146 | .*crunch*.local.xml
147 | nCrunchTemp_*
148 |
149 | # MightyMoose
150 | *.mm.*
151 | AutoTest.Net/
152 |
153 | # Web workbench (sass)
154 | .sass-cache/
155 |
156 | # Installshield output folder
157 | [Ee]xpress/
158 |
159 | # DocProject is a documentation generator add-in
160 | DocProject/buildhelp/
161 | DocProject/Help/*.HxT
162 | DocProject/Help/*.HxC
163 | DocProject/Help/*.hhc
164 | DocProject/Help/*.hhk
165 | DocProject/Help/*.hhp
166 | DocProject/Help/Html2
167 | DocProject/Help/html
168 |
169 | # Click-Once directory
170 | publish/
171 |
172 | # Publish Web Output
173 | *.[Pp]ublish.xml
174 | *.azurePubxml
175 | # Note: Comment the next line if you want to checkin your web deploy settings,
176 | # but database connection strings (with potential passwords) will be unencrypted
177 | *.pubxml
178 | *.publishproj
179 |
180 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
181 | # checkin your Azure Web App publish settings, but sensitive information contained
182 | # in these scripts will be unencrypted
183 | PublishScripts/
184 |
185 | # NuGet Packages
186 | *.nupkg
187 | # NuGet Symbol Packages
188 | *.snupkg
189 | # The packages folder can be ignored because of Package Restore
190 | **/[Pp]ackages/*
191 | # except build/, which is used as an MSBuild target.
192 | !**/[Pp]ackages/build/
193 | # Uncomment if necessary however generally it will be regenerated when needed
194 | #!**/[Pp]ackages/repositories.config
195 | # NuGet v3's project.json files produces more ignorable files
196 | *.nuget.props
197 | *.nuget.targets
198 |
199 | # Microsoft Azure Build Output
200 | csx/
201 | *.build.csdef
202 |
203 | # Microsoft Azure Emulator
204 | ecf/
205 | rcf/
206 |
207 | # Windows Store app package directories and files
208 | AppPackages/
209 | BundleArtifacts/
210 | Package.StoreAssociation.xml
211 | _pkginfo.txt
212 | *.appx
213 | *.appxbundle
214 | *.appxupload
215 |
216 | # Visual Studio cache files
217 | # files ending in .cache can be ignored
218 | *.[Cc]ache
219 | # but keep track of directories ending in .cache
220 | !?*.[Cc]ache/
221 |
222 | # Others
223 | ClientBin/
224 | ~$*
225 | *~
226 | *.dbmdl
227 | *.dbproj.schemaview
228 | *.jfm
229 | *.pfx
230 | *.publishsettings
231 | orleans.codegen.cs
232 |
233 | # Including strong name files can present a security risk
234 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
235 | #*.snk
236 |
237 | # Since there are multiple workflows, uncomment next line to ignore bower_components
238 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
239 | #bower_components/
240 |
241 | # RIA/Silverlight projects
242 | Generated_Code/
243 |
244 | # Backup & report files from converting an old project file
245 | # to a newer Visual Studio version. Backup files are not needed,
246 | # because we have git ;-)
247 | _UpgradeReport_Files/
248 | Backup*/
249 | UpgradeLog*.XML
250 | UpgradeLog*.htm
251 | ServiceFabricBackup/
252 | *.rptproj.bak
253 |
254 | # SQL Server files
255 | *.mdf
256 | *.ldf
257 | *.ndf
258 |
259 | # Business Intelligence projects
260 | *.rdl.data
261 | *.bim.layout
262 | *.bim_*.settings
263 | *.rptproj.rsuser
264 | *- [Bb]ackup.rdl
265 | *- [Bb]ackup ([0-9]).rdl
266 | *- [Bb]ackup ([0-9][0-9]).rdl
267 |
268 | # Microsoft Fakes
269 | FakesAssemblies/
270 |
271 | # GhostDoc plugin setting file
272 | *.GhostDoc.xml
273 |
274 | # Node.js Tools for Visual Studio
275 | .ntvs_analysis.dat
276 | node_modules/
277 |
278 | # Visual Studio 6 build log
279 | *.plg
280 |
281 | # Visual Studio 6 workspace options file
282 | *.opt
283 |
284 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
285 | *.vbw
286 |
287 | # Visual Studio LightSwitch build output
288 | **/*.HTMLClient/GeneratedArtifacts
289 | **/*.DesktopClient/GeneratedArtifacts
290 | **/*.DesktopClient/ModelManifest.xml
291 | **/*.Server/GeneratedArtifacts
292 | **/*.Server/ModelManifest.xml
293 | _Pvt_Extensions
294 |
295 | # Paket dependency manager
296 | .paket/paket.exe
297 | paket-files/
298 |
299 | # FAKE - F# Make
300 | .fake/
301 |
302 | # CodeRush personal settings
303 | .cr/personal
304 |
305 | # Python Tools for Visual Studio (PTVS)
306 | __pycache__/
307 | *.pyc
308 |
309 | # Cake - Uncomment if you are using it
310 | # tools/**
311 | # !tools/packages.config
312 |
313 | # Tabs Studio
314 | *.tss
315 |
316 | # Telerik's JustMock configuration file
317 | *.jmconfig
318 |
319 | # BizTalk build output
320 | *.btp.cs
321 | *.btm.cs
322 | *.odx.cs
323 | *.xsd.cs
324 |
325 | # OpenCover UI analysis results
326 | OpenCover/
327 |
328 | # Azure Stream Analytics local run output
329 | ASALocalRun/
330 |
331 | # MSBuild Binary and Structured Log
332 | *.binlog
333 |
334 | # NVidia Nsight GPU debugger configuration file
335 | *.nvuser
336 |
337 | # MFractors (Xamarin productivity tool) working folder
338 | .mfractor/
339 |
340 | # Local History for Visual Studio
341 | .localhistory/
342 |
343 | # BeatPulse healthcheck temp database
344 | healthchecksdb
345 |
346 | # Backup folder for Package Reference Convert tool in Visual Studio 2017
347 | MigrationBackup/
348 |
349 | # Ionide (cross platform F# VS Code tools) working folder
350 | .ionide/
351 |
352 | # Custom
353 | .env.ps1
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Azure SQL DB Import Data Samples
2 |
3 |
4 | ## 1.0.0 (2021-01-05)
5 |
6 | First Release
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to the project
2 |
3 | This project welcomes contributions and suggestions. Most contributions require you to agree to a
4 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
5 | the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
6 |
7 | When you submit a pull request, a CLA bot will automatically determine whether you need to provide
8 | a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
9 | provided by the bot. You will only need to do this once across all repos using our CLA.
10 |
11 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
12 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
13 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
14 |
15 | - [Code of Conduct](#coc)
16 | - [Issues and Bugs](#issue)
17 | - [Feature Requests](#feature)
18 | - [Submission Guidelines](#submit)
19 |
20 | ## Code of Conduct
21 | Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
22 |
23 | ## Found an Issue?
24 | If you find a bug in the source code or a mistake in the documentation, you can help us by
25 | [submitting an issue](#submit-issue) to the GitHub Repository. Even better, you can
26 | [submit a Pull Request](#submit-pr) with a fix.
27 |
28 | ## Want a Feature?
29 | You can *request* a new feature by [submitting an issue](#submit-issue) to the GitHub
30 | Repository. If you would like to *implement* a new feature, please submit an issue with
31 | a proposal for your work first, to be sure that we can use it.
32 |
33 | * **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
34 |
35 | ## Submission Guidelines
36 |
37 | ### Submitting an Issue
38 | Before you submit an issue, search the archive; your question may already have been answered.
39 |
40 | If your issue appears to be a bug, and hasn't been reported, open a new issue.
41 | Help us to maximize the effort we can spend fixing issues and adding new
42 | features, by not reporting duplicate issues. Providing the following information will increase the
43 | chances of your issue being dealt with quickly:
44 |
45 | * **Overview of the Issue** - if an error is being thrown, a non-minified stack trace helps
46 | * **Version** - what version is affected (e.g. 0.1.2)
47 | * **Motivation for or Use Case** - explain what you are trying to do and why the current behavior is a bug for you
48 | * **Browsers and Operating System** - is this a problem with all browsers?
49 | * **Reproduce the Error** - provide a live example or an unambiguous set of steps
50 | * **Related Issues** - has a similar issue been reported before?
51 | * **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be
52 | causing the problem (line of code or commit)
53 |
54 | You can file new issues by providing the above information at the corresponding repository's issues link: https://github.com/[organization-name]/[repository-name]/issues/new.
55 |
56 | ### Submitting a Pull Request (PR)
57 | Before you submit your Pull Request (PR) consider the following guidelines:
58 |
59 | * Search the repository (https://github.com/[organization-name]/[repository-name]/pulls) for an open or closed PR
60 | that relates to your submission. You don't want to duplicate effort.
61 |
62 | * Make your changes in a new git fork:
63 |
64 | * Commit your changes using a descriptive commit message
65 | * Push your fork to GitHub:
66 | * In GitHub, create a pull request
67 | * If we suggest changes then:
68 | * Make the required updates.
69 | * Rebase your fork and force push to your GitHub repository (this will update your Pull Request):
70 |
71 | ```shell
72 | git rebase master -i
73 | git push -f
74 | ```
75 |
76 | That's it! Thank you for your contribution!
77 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_type: sample
3 | languages:
4 | - tsql
5 | - sql
6 | - json
7 | products:
8 | - azure
9 | - azure-sql-database
10 | - azure-data-factory
11 | - azure-databricks
12 | description: "Samples on how to import data (JSON, CSV, Flat-Files, etc) into Azure SQL"
13 | urlFragment: azure-sql-db-import-data-samples
14 | ---
15 |
16 |
23 |
24 | # Azure SQL DB Import Data Samples
25 |
26 | Samples on how to import data (JSON, CSV, Flat-Files, etc) into Azure SQL
27 |
28 | All samples are in the `scripts` folder. Sample data used for running the samples is in the `json` and `csv` folders.
29 |
30 | ## Pre-Requisites
31 |
32 | ### Have an Azure SQL database
33 |
34 | Make sure you have a database in Azure that you can use for tests. If you are new to Azure SQL and need help creating a new database, make sure to watch this 5-minute video:
35 |
36 | [Demo: Deploy Azure SQL Database](https://channel9.msdn.com/Series/Azure-SQL-for-Beginners/Demo-Deploy-Azure-SQL-Database-14-of-61)
37 |
38 | Remember that import speed is always bound by the maximum "Log Rate Limit" of the database tier you are using. More details in this article: [Raising log rate limits for General Purpose service tier in Azure SQL Database](https://techcommunity.microsoft.com/t5/azure-sql/raising-log-rate-limits-for-general-purpose-service-tier-in/ba-p/1784622).
39 |
40 | Remember that Azure SQL Hyperscale has a 100 MB/sec limit no matter the number of vCores: [How much time would it take to bring in X amount of data to Hyperscale](https://docs.microsoft.com/en-us/azure/azure-sql/database/service-tier-hyperscale-frequently-asked-questions-faq#how-much-time-would-it-take-to-bring-in-x-amount-of-data-to-hyperscale)
41 |
42 | ### Run the setup script
43 |
44 | Run the script `00-on-prem-tools-user-setup.sql` to create the `customer` table used by the samples.
45 |
46 | The script will also create a demo user that will be used to run the samples. Feel free to change the user name and password if you wish.
47 |
48 | ### Configure the .env.ps1 file
49 |
50 | Create a `.env.ps1` file in the `scripts` folder using the provided `.env.ps1.template` file. Make sure to fill the variables with the correct values to access the demo Azure SQL database that you have decided to use.
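
For reference, a filled-in `.env.ps1` could look like the following sketch (every value is a placeholder; the user, password and table shown here are just the defaults from the setup script):

```powershell
# .env.ps1 - local connection settings (git-ignored, see .gitignore)
$db_user     = "demouser"
$db_password = "Demo_USer-Str0ngPassw0rd!"
$db_server   = "<your-server>.database.windows.net"
$db_database = "<your-database>"
$db_table    = "dbo.customer"
```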
51 |
52 | ## Use BCP
53 |
54 | BCP (Bulk Copy Program) is of course an option: probably the easiest one, and one of the fastest. Make sure to get the latest version from: [bcp Utility](https://docs.microsoft.com/en-us/sql/tools/bcp-utility?view=sql-server-ver15#download-the-latest-version-of-bcp-utility)
55 |
56 | A working sample is available in the `02-import-bcp.ps1` script.
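
For a quick look, the core of that script is a single `bcp` call along these lines (a minimal sketch assuming the variables from `.env.ps1` are loaded and `bcp.exe` is in the path):

```powershell
# Load the pipe-delimited customer.tbl file into the target table.
# -c = character mode, -C 65001 = UTF-8 code page,
# -t "|" = field terminator, -r "|\n" = row terminator
bcp $db_table in "..\csv\customer.tbl" `
    -S $db_server -d $db_database -U $db_user -P $db_password `
    -c -C 65001 -t "|" -r "|\n"
```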
57 |
58 | ## Use BULK INSERT or OPENROWSET
59 |
60 | If your data is in Azure Blob Storage, you can import or read the files right from Azure SQL, without the need for any external tool.
61 |
62 | Sample is here: `03-bulkinsert-openrowset.sql`
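
If you prefer to launch that T-SQL from PowerShell like the other samples, here is a minimal sketch using the dbatools `Invoke-DbaQuery` cmdlet (it assumes the external data source created in `03-bulkinsert-openrowset.sql` already exists and that the files have been uploaded to the referenced container):

```powershell
# Ask Azure SQL to bulk-load the file straight from Blob Storage
$sp = ConvertTo-SecureString $db_password -AsPlainText -Force
$c  = New-Object System.Management.Automation.PSCredential ($db_user, $sp)
Invoke-DbaQuery -SqlInstance $db_server -SqlCredential $c -Database $db_database -Query @"
bulk insert dbo.[customer]
from 'import-demo/customer.tbl'
with (data_source = 'dmstore2_misc', codepage = '65001', fieldterminator = '|', rowterminator = '|\n')
"@
```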
63 |
64 | ## Use Write-DbaDbTableData
65 |
66 | If you are a PowerShell user, you can use the `Write-DbaDbTableData` cmdlet made available by the amazing [dbatools](https://docs.dbatools.io/#Write-DbaDbTableData) project.
67 |
68 | Sample is available in the `01-import-csv.ps1` script.
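
In short, the script loads the file with `Import-Csv` and hands the rows to dbatools. Here is a condensed sketch of `01-import-csv.ps1` (assuming the dbatools module is installed and `.env.ps1` has been sourced):

```powershell
# Read the pipe-delimited file into memory, then bulk-load it into Azure SQL
$dt = Import-Csv -Delimiter "|" -Path "..\csv\customer.tbl" `
    -Header "C_CUSTKEY", "C_NAME", "C_ADDRESS", "C_NATIONKEY", "C_PHONE", "C_ACCTBAL", "C_MKTSEGMENT", "C_COMMENT"
$sp = ConvertTo-SecureString $db_password -AsPlainText -Force
$c  = New-Object System.Management.Automation.PSCredential ($db_user, $sp)
Write-DbaDbTableData -SqlInstance $db_server -SqlCredential $c -InputObject $dt `
    -Database $db_database -Table $db_table -Truncate
```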
69 |
70 | ## Additional Resources
71 |
72 | Of course there are more ways to import (and export) data into Azure SQL. Here are the most common ones:
73 |
74 | ### Azure Data Factory
75 |
76 | The easiest option, with great performance too. It [supports a variety of different data sources](https://docs.microsoft.com/en-us/azure/data-factory/supported-file-formats-and-compression-codecs), not only CSV and JSON, but also Parquet, AVRO, ORC and so on.
77 |
78 | The [Copy Data](https://docs.microsoft.com/en-us/azure/data-factory/tutorial-copy-data-tool) tool guides you through the entire process of importing data with an easy-to-follow, wizard-style interface.
79 |
80 | This YouTube video shows the best practices to load data as fast as possible into Azure SQL via Azure Data Factory: [Best Practices Using Azure SQL as Sink in ADF](https://www.youtube.com/watch?v=p0CFHMtGVwM&list=PL3EZ3A8mHh0yrJwK_lnz9_J4cimw9ivYD&index=16)
81 |
82 | ### Apache Spark
83 |
84 | Apache Spark is another option if you need more flexibility. It can read almost any data format, and it can efficiently write data into Azure SQL. A full end-to-end sample on how to import data as fast as possible is here: [Fast Data Loading in Azure SQL DB using Azure Databricks](https://github.com/Azure-Samples/azure-sql-db-databricks/tree/main/)
85 |
86 | ### Azure Synapse
87 |
88 | Azure Synapse is another way to read common data formats, like Parquet or sets of CSV files, from Azure SQL, using Azure Synapse Serverless SQL Pools as a bridge. Here's a detailed article on how to do that: [Azure SQL can read Azure Data Lake storage files using Synapse SQL external tables](https://devblogs.microsoft.com/azure-sql/read-azure-storage-files-using-synapse-sql-external-tables/)
89 |
--------------------------------------------------------------------------------
/csv/customer.fmt:
--------------------------------------------------------------------------------
1 | 14.0
2 | 8
3 | 1 SQLCHAR 0 50 "|" 1 C_CUSTKEY ""
4 | 2 SQLCHAR 0 50 "|" 2 C_NAME ""
5 | 3 SQLCHAR 0 50 "|" 3 C_ADDRESS ""
6 | 4 SQLCHAR 0 50 "|" 4 C_NATIONKEY ""
7 | 5 SQLCHAR 0 50 "|" 5 C_PHONE ""
8 | 6 SQLCHAR 0 50 "|" 6 C_ACCTBAL ""
9 | 7 SQLCHAR 0 50 "|" 7 C_MKTSEGMENT ""
10 | 8 SQLCHAR 0 200 "|\r\n" 8 C_COMMENT ""
11 |
12 |
--------------------------------------------------------------------------------
/csv/video_games.csv.readme.txt:
--------------------------------------------------------------------------------
1 | Video Games CSV File
2 | From the CORGIS Dataset Project
3 | By Austin Cory Bart acbart@vt.edu
4 |
5 | Downloaded from:
6 | https://corgis-edu.github.io/corgis/csv/video_games/
7 |
8 |
--------------------------------------------------------------------------------
/json/json-generator.txt:
--------------------------------------------------------------------------------
1 | SITE:
2 | https://www.json-generator.com/
3 |
4 | TEMPLATE:
5 | {
6 | _id: '{{objectId()}}',
7 | firstName:'{{firstName()}}',
8 | lastName:'{{surname()}}',
9 | isAlive: '{{bool()}}',
10 | age:'{{integer(14,99)}}',
11 | address: {
12 | streetAddress:'{{street()}}',
13 | city:"{{city()}}",
14 | state:"{{state()}}",
15 | postalCode:"{{integer(100, 10000)}}"
16 | },
17 | "phoneNumbers": ['{{repeat(1, 3)}}',
18 | {type: function (tags) {
19 | var fruits = ['home', 'work', 'mobile'];
20 | return fruits[tags.integer(0, fruits.length - 1)];
21 | },
22 | number: "+1 {{phone()}}"
23 | }]
24 | }
25 |
26 |
27 |
--------------------------------------------------------------------------------
/json/user1.json:
--------------------------------------------------------------------------------
1 | {
2 | "_id": "5fed1b38309495de1bc4f653",
3 | "firstName": "Sims",
4 | "lastName": "Arnold",
5 | "isAlive": false,
6 | "age": 35,
7 | "address": {
8 | "streetAddress": "Sumner Place",
9 | "city": "Canoochee",
10 | "state": "Palau",
11 | "postalCode": 1558
12 | },
13 | "phoneNumbers": [
14 | {
15 | "type": "home",
16 | "number": "+1 (830) 465-2965"
17 | },
18 | {
19 | "type": "home",
20 | "number": "+1 (889) 439-3632"
21 | }
22 | ]
23 | }
--------------------------------------------------------------------------------
/json/user2.json:
--------------------------------------------------------------------------------
1 | {
2 | "_id": "5fed1b4a9edd01ce6faa1481",
3 | "firstName": "Ester",
4 | "lastName": "Camacho",
5 | "isAlive": false,
6 | "age": 15,
7 | "address": {
8 | "streetAddress": "Evergreen Avenue",
9 | "city": "Villarreal",
10 | "state": "Nebraska",
11 | "postalCode": 2580
12 | },
13 | "phoneNumbers": [
14 | {
15 | "type": "work",
16 | "number": "+1 (811) 411-3710"
17 | },
18 | {
19 | "type": "mobile",
20 | "number": "+1 (983) 463-3785"
21 | },
22 | {
23 | "type": "home",
24 | "number": "+1 (959) 505-3471"
25 | }
26 | ]
27 | }
--------------------------------------------------------------------------------
/json/user3.json:
--------------------------------------------------------------------------------
1 | {
2 | "_id": "5fed1b5d93edd75c4ab847f8",
3 | "firstName": "Francesca",
4 | "lastName": "Hurst",
5 | "isAlive": false,
6 | "age": 16,
7 | "address": {
8 | "streetAddress": "Fleet Place",
9 | "city": "Baker",
10 | "state": "Mississippi",
11 | "postalCode": 4918
12 | },
13 | "phoneNumbers": [
14 | {
15 | "type": "mobile",
16 | "number": "+1 (923) 410-3051"
17 | }
18 | ]
19 | }
--------------------------------------------------------------------------------
/json/users.json:
--------------------------------------------------------------------------------
1 | {"_id":"5ff36862d2693e8e8f21ad09","firstName":"Houston","lastName":"Carpenter","isAlive":false,"age":56,"address":{"streetAddress":"Russell Street","city":"Nogal","state":"Palau","postalCode":2161},"phoneNumbers":[{"type":"mobile","number":"+1 (836) 542-3319"},{"type":"home","number":"+1 (997) 467-3043"}]}
2 | {"_id":"5ff36868b9348826533a79e3","firstName":"Reba","lastName":"Mcintosh","isAlive":true,"age":96,"address":{"streetAddress":"Gold Street","city":"Titanic","state":"New York","postalCode":4809},"phoneNumbers":[{"type":"mobile","number":"+1 (861) 454-3134"},{"type":"work","number":"+1 (912) 554-2411"}]}
3 | {"_id":"5ff3686fdfc968f62fb0fb29","firstName":"Laurie","lastName":"Hughes","isAlive":false,"age":96,"address":{"streetAddress":"Baughman Place","city":"Keller","state":"Arizona","postalCode":5659},"phoneNumbers":[{"type":"work","number":"+1 (944) 548-2700"},{"type":"home","number":"+1 (971) 409-3822"}]}
4 | {"_id":"5ff36879f03e0284b4a95dcb","firstName":"Sanchez","lastName":"Morse","isAlive":true,"age":98,"address":{"streetAddress":"Delevan Street","city":"Chical","state":"Indiana","postalCode":1736},"phoneNumbers":[{"type":"work","number":"+1 (963) 436-3814"},{"type":"mobile","number":"+1 (966) 441-2426"},{"type":"mobile","number":"+1 (943) 400-2233"}]}
5 | {"_id":"5ff368815f1e3fdad51c1a6e","firstName":"Clara","lastName":"Cantrell","isAlive":true,"age":75,"address":{"streetAddress":"College Place","city":"Masthope","state":"Oklahoma","postalCode":2905},"phoneNumbers":[{"type":"mobile","number":"+1 (967) 578-2188"},{"type":"work","number":"+1 (993) 501-3582"},{"type":"mobile","number":"+1 (882) 574-2090"}]}
6 |
--------------------------------------------------------------------------------
/scripts/.env.ps1.template:
--------------------------------------------------------------------------------
1 | ## Configure the following variables to be correct for your Azure SQL database
2 | $db_user = ""
3 | $db_password = ""
4 | $db_server = ".database.windows.net"
5 | $db_database = ""
6 | $db_table = ""
7 |
--------------------------------------------------------------------------------
/scripts/00-on-prem-tools-user-setup.sql:
--------------------------------------------------------------------------------
1 | /*
2 | ON MASTER (needed for DBATools Demo)
3 | */
4 |
5 | --Create Login
6 | create login [demouser] with password = 'Demo_USer-Str0ngPassw0rd!'
7 | go
8 |
9 | -- Create User
10 | create user [demouser] from login [demouser]
11 | go
12 |
13 | /*
14 | ON TARGET DATABASE
15 | */
16 |
17 | -- Create user
18 | create user [demouser] from login [demouser]
19 | go
20 | alter role [db_owner] add member [demouser]
21 | go
22 |
23 | -- Create Table
24 | drop table if exists [dbo].[customer];
25 | create table [dbo].[customer]
26 | (
27 | [C_CUSTKEY] [int] not null,
28 | [C_NAME] [varchar](25) not null,
29 | [C_ADDRESS] [varchar](40) not null,
30 | [C_NATIONKEY] [int] not null,
31 | [C_PHONE] [char](15) not null,
32 | [C_ACCTBAL] [decimal](15, 2) not null,
33 | [C_MKTSEGMENT] [char](10) not null,
34 | [C_COMMENT] [varchar](117) not null
35 | )
36 | go
--------------------------------------------------------------------------------
/scripts/01-import-csv.ps1:
--------------------------------------------------------------------------------
1 | # Source .env file
2 | . .\.env.ps1
3 |
4 | ## Sample file to import
5 | $csv_path = "..\csv\customer.tbl"
6 |
7 | ## Go!
8 | $dt = Import-Csv -Delimiter "|" -Path $csv_path -Header "C_CUSTKEY", "C_NAME", "C_ADDRESS", "C_NATIONKEY", "C_PHONE", "C_ACCTBAL", "C_MKTSEGMENT", "C_COMMENT"
9 |
10 | $sp = ConvertTo-SecureString $db_password -AsPlainText -Force
11 | $c = New-Object System.Management.Automation.PSCredential ($db_user, $sp)
12 |
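# Write-DbaDbTableData is part of the dbatools module (https://docs.dbatools.io); install it with: Install-Module dbatools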
13 | Write-DbaDbTableData -SqlInstance $db_server -SqlCredential $c -InputObject $dt -Database $db_database -Table $db_table -Truncate
14 |
--------------------------------------------------------------------------------
/scripts/02-import-bcp.ps1:
--------------------------------------------------------------------------------
1 | # Source .env file
2 | . .\.env.ps1
3 |
4 | ## Sample file to import
5 | $csv_path = "..\csv\customer.tbl"
6 |
7 | ## BCP tool
8 | $bcp_path = "C:\Program Files\Microsoft SQL Server\Client SDK\ODBC\170\Tools\Binn\bcp.exe"
9 |
10 | ## Go!
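## bcp flags: -c = character mode, -C 65001 = UTF-8 code page, -t "|" = field terminator, -r "|\n" = row terminator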
11 | & $bcp_path $db_table in $csv_path -S $db_server -U $db_user -P $db_password -d $db_database -c -C 65001 -r "|\n" -t "|"
12 |
--------------------------------------------------------------------------------
/scripts/03-bulkinsert-openrowset.sql:
--------------------------------------------------------------------------------
1 | /*
2 | SETUP
3 | */
4 |
5 | /*
6 | drop external data source [dmstore2_misc]
7 | drop database scoped credential [dmstore2_misc]
8 | drop master key
9 | go
10 | */
11 |
12 | select * from sys.[database_scoped_credentials]
13 | go
14 | select * from sys.[external_data_sources]
15 | go
16 |
17 | create master key encryption by password = 'My-L0ng&Str0ng_P4ss0wrd!';
18 | go
19 |
20 | -- Store the SAS Token (WITHOUT leading "?")
21 | -- It can be easily generated using Azure Storage Explorer, the AZ CLI, PowerShell or the Azure Portal
22 | create database scoped credential [dmstore2_misc]
23 | with identity = 'SHARED ACCESS SIGNATURE',
24 | secret = '';
25 | go
26 |
27 | -- Create the data source
28 | create external data source [dmstore2_misc]
29 | with
30 | (
31 | type = blob_storage,
32 | location = 'https://dmstore2.blob.core.windows.net/misc',
33 | credential = [dmstore2_misc]
34 | );
35 | go
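-- Note: the queries below assume customer.tbl, customer.fmt, user1.json and users.json
-- (from this repo's csv and json folders) have been uploaded to an 'import-demo' folder in the referenced container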
36 |
37 | /*
38 | --- JSON Files ---
39 | */
40 |
41 | -- Read the JSON file
42 | -- CANNOT use wildcards
43 | select
44 | cast(bulkcolumn as nvarchar(max)) as jsondata
45 | from
46 | openrowset(bulk 'import-demo/user1.json', data_source = 'dmstore2_misc', single_clob) as azureblob
47 | go
48 |
49 | -- Read and access the content of the JSON file
50 | with cte as
51 | (
52 | select
53 | cast(bulkcolumn as nvarchar(max)) as jsondata
54 | from
55 | openrowset(bulk 'import-demo/user1.json', data_source = 'dmstore2_misc', single_clob) as azureblob
56 | )
57 | select
58 | j.*
59 | from
60 | cte
61 | cross apply
62 | openjson(cte.jsondata) j
63 | go
64 |
65 | -- Read and access the content of the JSON file, with schema-on-read
66 | with cte as
67 | (
68 | select
69 | cast(bulkcolumn as nvarchar(max)) as jsondata
70 | from
71 | openrowset(bulk 'import-demo/user1.json', data_source = 'dmstore2_misc', single_clob) as azureblob
72 | )
73 | select
74 | j.*
75 | from
76 | cte
77 | cross apply
78 | openjson(cte.jsondata) with
79 | (
80 | firstName nvarchar(50),
81 | lastName nvarchar(50),
82 | isAlive bit,
83 | age int,
84 | [address] nvarchar(max) as json,
85 | phoneNumbers nvarchar(max) as json
86 | ) j
87 | go
88 |
89 | -- What if the source is a list of JSON rows (one document per line)?
90 | with cte as
91 | (
92 | select
93 | cast(bulkcolumn as nvarchar(max)) as jsondata
94 | from
95 | openrowset(bulk 'import-demo/users.json', data_source = 'dmstore2_misc', single_clob) as azureblob
96 | )
97 | select
98 | s.[value] as jsonrow
99 | from
100 | cte c
101 | cross apply
102 | string_split(c.jsondata, char(10)) s
103 | where
104 | replace(s.[value], char(13), '') <> ''
105 | go
106 |
107 | /*
108 | --- CSV / Flat Files ---
109 | */
110 | -- Create target table
111 | drop table if exists [dbo].[customer];
112 | create table [dbo].[customer]
113 | (
114 | [C_CUSTKEY] [int] not null,
115 | [C_NAME] [varchar](25) not null,
116 | [C_ADDRESS] [varchar](40) not null,
117 | [C_NATIONKEY] [int] not null,
118 | [C_PHONE] [char](15) not null,
119 | [C_ACCTBAL] [decimal](15, 2) not null,
120 | [C_MKTSEGMENT] [char](10) not null,
121 | [C_COMMENT] [varchar](117) not null
122 | )
123 | go
124 |
125 | -- Import data
126 | -- Data types will be inferred from table schema
127 | bulk insert dbo.[customer]
128 | from 'import-demo/customer.tbl'
129 | with (
130 | data_source = 'dmstore2_misc',
131 | codepage = '65001',
132 | fieldterminator = '|',
133 | rowterminator = '|\n'
134 | )
135 | go
136 |
137 | -- Check imported data
138 | select top (100) * from [dbo].[customer]
139 | go
140 |
141 | -- Query data without importing
142 | -- A format file is needed to provide the required schema
143 | select
144 | *
145 | from
146 | openrowset(bulk 'import-demo/customer.tbl',
147 | data_source = 'dmstore2_misc',
148 | codepage = '65001',
149 | formatfile = 'import-demo/customer.fmt',
150 | formatfile_data_source = 'dmstore2_misc'
151 | ) as azureblob
152 | go
153 |
--------------------------------------------------------------------------------