├── .gitattributes ├── .gitignore ├── Chapter02 ├── Chapter02.sql └── DescriptiveStatistics.dll ├── Chapter03 └── Chapter03.R ├── Chapter04 ├── 01 Functions.sql ├── 02 DropIfExists.sql ├── 03 AlterColumnOnline.sql ├── 04 TruncateTable.sql ├── 05 IndexKeySizeLimit.sql └── 06 QueryHints.sql ├── Chapter05 ├── 01 FORJSON.sql ├── 02 OPENJSON.sql ├── 03 ISSON.sql ├── 04 JSON_VALUE.sql ├── 05 JSON_QUERY.sql ├── 06 MODIFY.sql ├── 07 JSON Performance.sql └── app.people.json ├── Chapter06 └── Stretch Databases.sql ├── Chapter07 ├── 01 ApplicationTime.sql ├── 02 System Versioned Tables.sql └── 03 SCD.sql ├── Chapter08 ├── AEDemo.exe ├── AEDemo │ ├── .vs │ │ └── AEDemo │ │ │ └── v14 │ │ │ └── .suo │ ├── AEDemo.sln │ └── AEDemo │ │ ├── AEDemo.csproj │ │ ├── App.config │ │ ├── Program.cs │ │ ├── Properties │ │ └── AssemblyInfo.cs │ │ ├── bin │ │ └── Debug │ │ │ ├── AEDemo.exe │ │ │ ├── AEDemo.exe.config │ │ │ ├── AEDemo.pdb │ │ │ ├── AEDemo.vshost.exe │ │ │ ├── AEDemo.vshost.exe.config │ │ │ └── AEDemo.vshost.exe.manifest │ │ └── obj │ │ └── Debug │ │ ├── AEDemo.csproj.FileListAbsolute.txt │ │ ├── AEDemo.csprojResolveAssemblyReference.cache │ │ ├── AEDemo.exe │ │ ├── AEDemo.pdb │ │ ├── DesignTimeResolveAssemblyReferencesInput.cache │ │ ├── TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs │ │ ├── TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs │ │ └── TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs ├── Chapter08.sql ├── Chapter08AE1.sql ├── Chapter08AE2.sql ├── Chapter08DDM.sql ├── Chapter08RLS.sql └── Chapter08TDE.sql ├── Chapter09 └── QueryStore.sql ├── Chapter10 └── Chapter10.sql ├── Chapter11 └── B05694_11_01.sql ├── Chapter12 └── B05694_12_01.sql ├── Chapter13 ├── Chapter13.R ├── Chapter13.sql ├── Chapter13_TM.csv └── Code file.txt ├── Chapter14 ├── Chapter14.R └── Chapter14_TM.csv ├── LICENSE └── README.md /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Windows image file caches 2 | Thumbs.db 3 | ehthumbs.db 4 | 5 | # Folder config file 6 | Desktop.ini 7 | 8 | # Recycle Bin used on file shares 9 | $RECYCLE.BIN/ 10 | 11 | # Windows Installer files 12 | *.cab 13 | *.msi 14 | *.msm 15 | *.msp 16 | 17 | # Windows shortcuts 18 | *.lnk 19 | 20 | # ========================= 21 | # Operating System Files 22 | # ========================= 23 | 24 | # OSX 25 | # ========================= 26 | 27 | .DS_Store 28 | .AppleDouble 29 | .LSOverride 30 | 31 | # Thumbnails 32 | ._* 33 | 34 | # Files that might appear in the root of a volume 35 | .DocumentRevisions-V100 36 | .fseventsd 37 | .Spotlight-V100 38 | .TemporaryItems 39 | .Trashes 40 | .VolumeIcon.icns 41 | 42 | # Directories potentially created on remote AFP share 43 | .AppleDB 44 | .AppleDesktop 45 | Network Trash Folder 46 | Temporary Items 47 | .apdisk 48 | -------------------------------------------------------------------------------- 
/Chapter02/Chapter02.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter02/Chapter02.sql -------------------------------------------------------------------------------- /Chapter02/DescriptiveStatistics.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter02/DescriptiveStatistics.dll -------------------------------------------------------------------------------- /Chapter03/Chapter03.R: -------------------------------------------------------------------------------- 1 | # ---------------------------------------------------- 2 | # -------- SQL Server 2016 Developer's Guide -------- 3 | # ----- Chapter 03 - SQL Server Tools ----- 4 | # ---------------------------------------------------- 5 | 6 | # ---------------------------------------------------- 7 | # -- Section 2: Tools for Developing R Code 8 | # ---------------------------------------------------- 9 | 10 | # Analyze the built-in iris dataset 11 | data(iris); 12 | plot(iris); 13 | 14 | 15 | # Custom function for skewness and kurtosis 16 | 17 | skewkurt <- function(p) { 18 | avg <- mean(p) 19 | cnt <- length(p) 20 | stdev <- sd(p) 21 | skew <- sum((p-avg)^3/stdev^3)/cnt 22 | kurt <- sum((p-avg)^4/stdev^4)/cnt-3 23 | return(c(skewness=skew, kurtosis=kurt)) 24 | } 25 | 26 | 27 | # Call 28 | skewkurt(iris$Sepal.Length); 29 | 30 | # Clean up the workspace 31 | rm(skewkurt); 32 | rm(iris); 33 | -------------------------------------------------------------------------------- /Chapter04/01 Functions.sql: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------- 2 | -------- SQL Server 2016 Developer’s Guide 3 | -------- Chapter 04 - Transact-SQL Enhancements 4 | -------- Functions 5 | -------------------------------------------------------------------- 6 | 7 | ---------------------------------------------------- 8 | -- Functions (STRING_SPLIT) 9 | ---------------------------------------------------- 10 | --split the input string and return a table 11 | SELECT value 12 | FROM STRING_SPLIT(N'Rapid Wien,Benfica Lisbon,Seattle Seahawks',','); 13 | /*Result: 14 | value 15 | ------ 16 | Rapid Wien 17 | Benfica Lisbon 18 | Seattle Seahawks 19 | */ 20 | 21 | --get stock items having the tag "Super Value" 22 | USE WideWorldImporters; 23 | SELECT StockItemID, StockItemName, Tags 24 | FROM Warehouse.StockItems 25 | WHERE '"Super Value"' IN (SELECT value FROM STRING_SPLIT(REPLACE(REPLACE(Tags,'[',''), ']',''), ',')); 26 | 27 | 28 | --Get order details for comma separated list of order IDs (using STRING_SPLIT) 29 | USE WideWorldImporters; 30 | DECLARE @orderIds AS VARCHAR(100) = '1,3,7,8,9,11'; 31 | SELECT o.OrderID, o.CustomerID, o.OrderDate 32 | FROM Sales.Orders o 33 | INNER JOIN STRING_SPLIT(@orderIds,',') x ON x.value= o.OrderID; 34 | GO 35 | /*Result: 36 | OrderID CustomerID OrderDate 37 | ----------- ----------- ---------- 38 | 1 832 2013-01-01 39 | 3 105 2013-01-01 40 | 7 575 2013-01-01 41 | 8 964 2013-01-01 42 | 9 77 2013-01-01 43 | 11 586 2013-01-01 44 | */ 45 | --Get order details for comma separated list of order IDs (using OPENJSON) 46 | DECLARE @orderIds AS VARCHAR(100) = '1,3,7,8,9,11'; 47 | SELECT o.OrderID, o.CustomerID, 
o.OrderDate 48 | FROM Sales.Orders o 49 | INNER JOIN (SELECT value FROM OPENJSON( CHAR(91) + @orderIds + CHAR(93) )) x ON x.value= o.OrderID; 50 | GO 51 | /*Result: 52 | OrderID CustomerID OrderDate 53 | ----------- ----------- ---------- 54 | 1 832 2013-01-01 55 | 3 105 2013-01-01 56 | 7 575 2013-01-01 57 | 8 964 2013-01-01 58 | 9 77 2013-01-01 59 | 11 586 2013-01-01 60 | */ 61 | 62 | --NULL input produces an empty resultset 63 | DECLARE @input AS NVARCHAR(20) = NULL; 64 | SELECT * FROM STRING_SPLIT(@input,',') 65 | /*Result: 66 | value 67 | -------------------- 68 | */ 69 | 70 | --The database must be in the compatibility level 130, otherwise you'll get exception 71 | USE WideWorldImporters; 72 | ALTER DATABASE WideWorldImporters SET COMPATIBILITY_LEVEL = 120; 73 | GO 74 | SELECT value FROM STRING_SPLIT('1,2,3',','); 75 | /*Result: 76 | Msg 208, Level 16, State 1, Line 65 77 | Invalid object name 'STRING_SPLIT'. 78 | */ 79 | --back to the original compatibility level 80 | ALTER DATABASE WideWorldImporters SET COMPATIBILITY_LEVEL = 130; 81 | GO 82 | 83 | ---------------------------------------------------- 84 | -- Functions (STRING_ESCAPE) 85 | ---------------------------------------------------- 86 | 87 | --Data type for the input must be string 88 | SELECT STRING_ESCAPE(1, 'JSON') AS escaped_input; 89 | /*Result: 90 | 91 | Msg 8116, Level 16, State 1, Line 13 92 | Argument data type int is invalid for argument 1 of string_escape function. 93 | */ 94 | 95 | --Deprecated string types TEXT and NTEXT cannot be used, too 96 | DECLARE @input AS TEXT = 'test'; 97 | SELECT STRING_ESCAPE(@input, 'JSON') AS escaped_input; 98 | 99 | /*Result: 100 | 101 | Msg 2739, Level 16, State 1, Line 20 102 | The text, ntext, and image data types are invalid for local variables. 
103 | */ 104 | 105 | --Escaping string input 106 | SELECT STRING_ESCAPE('a\bc/de"f','JSON') AS escaped_input; 107 | /*Result: 108 | 109 | escaped_input 110 | -------------- 111 | a\\bc\/de\"f 112 | */ 113 | 114 | --Escaping string input 115 | SELECT STRING_ESCAPE(N'one 116 | "two" 117 | three/four\', 'JSON') AS escaped_input; 118 | 119 | /*Result: 120 | 121 | escaped_input 122 | ------------------------------------ 123 | one\r\n\t\"two\"\r\n\tthree\/four\\ 124 | */ 125 | 126 | --Both keys and values are escaped 127 | SELECT STRING_ESCAPE(N'key:1, i\d:4', 'JSON') AS escaped_input;; 128 | /*Result: 129 | 130 | key:1, i\\d:4 131 | */ 132 | 133 | --Escaping control characters 134 | SELECT STRING_ESCAPE(CHAR(0), 'JSON') AS escaped_char0, STRING_ESCAPE(CHAR(4), 'JSON') AS escaped_char4, STRING_ESCAPE(CHAR(31), 'JSON') AS escaped_char31; 135 | /*Result: 136 | 137 | escaped_char0 escaped_char4 escaped_char31 138 | -------------- --------------- ---------- 139 | \u0000 \u0004 \u001f 140 | */ 141 | 142 | --Escaping horizontal tab with multiple representations (both end up with the same escaping sequence) 143 | SELECT STRING_ESCAPE(CHAR(9), 'JSON') AS escaped_tab1, STRING_ESCAPE(' ', 'JSON') AS escaped_tab2; 144 | /*Result: 145 | 146 | escaped_tab1 escaped_tab2 147 | -------------- -------------- 148 | \t \t 149 | */ 150 | 151 | --NULL as input produces NULL as output 152 | DECLARE @input AS NVARCHAR(20) = NULL; 153 | SELECT STRING_ESCAPE(@input, 'JSON') AS escaped_input; 154 | /*Result: 155 | 156 | escaped_input 157 | -------------- 158 | NULL 159 | */ 160 | 161 | --Do not use it for an already formatted JSON string - all double quotas will be escaped 162 | DECLARE @json AS NVARCHAR(200) = '{ 163 | "id": 1, 164 | "name": "Milos Radivojevic", 165 | "country": "Austria", 166 | "favorite teams": ["Seattle Seahawks", "Benfica Lisbon", "Rapid Wien"] 167 | }'; 168 | SELECT STRING_ESCAPE(@json, 'JSON') AS escaped_json; 169 | /*Result: 170 | 171 | {\r\n \"id\": 1,\r\n \"name\": \"Milos Radivojevic\",\r\n \"country\": \"Austria\",\r\n \"favorite teams\": [\"Seattle Seahawks\", \"Benfica Lisbon\", \"Rapid Wien\"]\r\n} 172 | */ 173 | 174 | ---------------------------------------------------- 175 | -- Functions (COMPRESS) 176 | ---------------------------------------------------- 177 | 178 | --Use data collected by the XE session system_health to check potential compression rate for the column target_data 179 | SELECT 180 | xet.target_name, 181 | DATALENGTH(xet.target_data) AS original_size, 182 | DATALENGTH(COMPRESS(xet.target_data)) AS compressed_size, 183 | CAST((DATALENGTH(xet.target_data) - DATALENGTH(COMPRESS(xet.target_data)))*100.0/DATALENGTH(xet.target_data) AS DECIMAL(5,2)) AS compression_rate_in_percent 184 | FROM sys.dm_xe_session_targets xet 185 | INNER JOIN sys.dm_xe_sessions xe ON xe.address = xet.event_session_address 186 | WHERE xe.name = 'system_health'; 187 | GO 188 | /*Result (in abbreviated form, you might get different, but similar results) 189 | 190 | target_name original_size compressed_size compression_rate_in_percent 191 | -------------------- -------------------- -------------------- --------------------------------------- 192 | ring_buffer 3421326 86207 97.48 193 | event_file 408 222 45.59 194 | You can see that compression rate is about 97% 195 | */ 196 | 197 | --Compressed value can be also longer than the original one 198 | DECLARE @input AS NVARCHAR(15) = N'SQL Server 2016'; 199 | SELECT @input AS input, DATALENGTH(@input) AS input_size, COMPRESS(@input) AS compressed, 
DATALENGTH(COMPRESS(@input)) AS compressed_size; 200 | GO 201 | /*Result 202 | 203 | input input_size compressed compressed_size 204 | --------------- ----------- ------------------------------------------------------------------------------------------------- ------ 205 | SQL Server 2016 30 0x1F8B08000000000004000B660864F06150600866486528622803930A0C460C060C860C660C0041F9E38D1E000000 46 206 | 207 | */ 208 | 209 | --Compare compression rate between ROW and PAGE compression and the COMPRESS function 210 | --Let's create four sample tables: 211 | USE WideWorldImporters; 212 | 213 | --No compression 214 | DROP TABLE IF EXISTS dbo.messages; 215 | SELECT message_id, language_id, severity, is_event_logged, text 216 | INTO dbo.messages 217 | FROM sys.messages; 218 | CREATE UNIQUE CLUSTERED INDEX PK_messages ON dbo.messages(message_id, language_id); 219 | GO 220 | 221 | --ROW Compression 222 | DROP TABLE IF EXISTS dbo.messages_row; 223 | SELECT message_id, language_id, severity, is_event_logged, text 224 | INTO dbo.messages_row 225 | FROM sys.messages; 226 | CREATE UNIQUE CLUSTERED INDEX PK_messages_row ON dbo.messages_row(message_id, language_id) WITH(DATA_COMPRESSION = ROW); 227 | GO 228 | 229 | --PAGE Compression 230 | DROP TABLE IF EXISTS dbo.messages_page; 231 | SELECT message_id, language_id, severity, is_event_logged, text 232 | INTO dbo.messages_page 233 | FROM sys.messages; 234 | CREATE UNIQUE CLUSTERED INDEX PK_messages_page ON dbo.messages_page(message_id, language_id) WITH(DATA_COMPRESSION = PAGE); 235 | GO 236 | 237 | --COMPRESS Function 238 | DROP TABLE IF EXISTS dbo.messages_compress; 239 | SELECT message_id, language_id, severity, is_event_logged, COMPRESS(text) AS text 240 | INTO dbo.messages_compress 241 | FROM sys.messages; 242 | CREATE UNIQUE CLUSTERED INDEX PK_messages_compress ON dbo.messages_compress(message_id, language_id); 243 | GO 244 | 245 | --Check the size of all tables: 246 | EXEC sp_spaceused 'dbo.messages'; 247 | /*Result 248 | name rows reserved data index_size unused 249 | -------------- ------- ------------- ------------- ------------- ------- 250 | dbo.messages 278718 70216 KB 70000 KB 152 KB 64 KB 251 | */ 252 | EXEC sp_spaceused 'dbo.messages_row'; 253 | /*Result 254 | name rows reserved data index_size unused 255 | -------------- ------- ------------- ------------- ------------- ------- 256 | dbo.messages_row 278718 39304 KB 39128 KB 96 KB 80 KB 257 | */ 258 | 259 | EXEC sp_spaceused 'dbo.messages_page'; 260 | /*Result 261 | name rows reserved data index_size unused 262 | -------------- ------- ------------- ------------- ------------- ------- 263 | dbo.messages_page 278718 39112 KB 38936 KB 96 KB 80 KB 264 | */ 265 | EXEC sp_spaceused 'dbo.messages_compress'; 266 | /*Result 267 | name rows reserved data index_size unused 268 | -------------- ------- ------------- ------------- ------------- ------- 269 | dbo.messages_compress 278718 46536 KB 46344 KB 104 KB 88 KB 270 | */ 271 | 272 | --Cleanup 273 | DROP TABLE IF EXISTS dbo.messages; 274 | DROP TABLE IF EXISTS dbo.messages_row; 275 | DROP TABLE IF EXISTS dbo.messages_page; 276 | DROP TABLE IF EXISTS dbo.messages_compress; 277 | 278 | ---------------------------------------------------- 279 | -- Functions (DECOMPRESS) 280 | ---------------------------------------------------- 281 | 282 | --the function returns VARBINARY 283 | DECLARE @input AS NVARCHAR(100) = N'SQL Server 2016 Developer''s Guide'; 284 | SELECT DECOMPRESS(COMPRESS(@input)) AS input; 285 | GO 286 | /*Result 287 | 288 | input 289 | 
--------------------------------------------------------------------------------------------------------------------------------------- 290 | 0x530051004C00200053006500720076006500720020003200300031003600200044006500760065006C006F0070006500720027007300200047007500690064006500 291 | */ 292 | --to get the correct result you need to CAST to the original data type 293 | DECLARE @input AS NVARCHAR(100) = N'SQL Server 2016 Developer''s Guide'; 294 | SELECT CAST(DECOMPRESS(COMPRESS(@input)) AS NVARCHAR(100)) AS input; 295 | GO 296 | /*Result 297 | 298 | input 299 | ------------------------------------- 300 | SQL Server 2016 Developer's Guide 301 | */ 302 | 303 | --if you choose a non-Unicode CAST you will end up with unexpected results 304 | DECLARE @input AS NVARCHAR(100) = N'SQL Server 2016 Developer''s Guide'; 305 | SELECT CAST(DECOMPRESS(COMPRESS(@input)) AS VARCHAR(100)) AS input; 306 | GO 307 | /*Result if you have chosen the option Results to Text to display query results 308 | 309 | input 310 | -------------------------------------------------------------------- 311 | S Q L S e r v e r 2 0 1 6 D e v e l o p e r ' s G u i d e 312 | */ 313 | 314 | /*Result if you have chosen the option Results to Grid to display query results 315 | 316 | input 317 | ------- 318 | S 319 | */ 320 | 321 | --casting in the opposite direction produces even stranger results 322 | DECLARE @input AS VARCHAR(100) = N'SQL Server 2016 Developer''s Guide'; 323 | SELECT CAST(DECOMPRESS(COMPRESS(@input)) AS NVARCHAR(100)); 324 | GO 325 | /*Result 326 | 兓⁌敓癲牥㈠㄰‶敄敶潬数❲⁳畇摩e 327 | Bing translation to English: Fang ㈠㄰‶ Wu Zhen Tan Jin ⁌ duo epilepsy ❲ ⁳ 构词成分。 e 328 | */ 329 | 330 | ---------------------------------------------------- 331 | -- Functions (CURRENT_TRANSACTION_ID) 332 | ---------------------------------------------------- 333 | 334 | --Multiple calls of this function return different transaction numbers if there is no explicit transaction 335 | SELECT CURRENT_TRANSACTION_ID(); 336 | SELECT CURRENT_TRANSACTION_ID(); 337 | BEGIN TRAN 338 | SELECT CURRENT_TRANSACTION_ID(); 339 | SELECT CURRENT_TRANSACTION_ID(); 340 | COMMIT 341 | GO 342 | /*Result (on my machine, you will definitely get different numbers, but with the same pattern) 343 | 344 | 921406054 345 | 921406055 346 | 921406056 347 | 921406056 348 | */ 349 | 350 | --You can use the function CURRENT_TRANSACTION_ID to check whether your transaction is among the active transactions: 351 | SELECT * FROM sys.dm_tran_active_transactions WHERE transaction_id = CURRENT_TRANSACTION_ID(); 352 | 353 | --The function SESSION_ID() works only in Azure SQL Data Warehouse 354 | SELECT SESSION_ID(); 355 | /*Result 356 | 357 | Msg 195, Level 15, State 10, Line 133 358 | 'SESSION_ID' is not a recognized built-in function name. 359 | */ 360 | 361 | ---------------------------------------------------- 362 | -- Functions (SESSION_CONTEXT) 363 | ---------------------------------------------------- 364 | --Set session context variable 365 | EXEC sys.sp_set_session_context @key = N'language', @value = N'German'; 366 | --Read the variable 367 | SELECT SESSION_CONTEXT(N'language') AS lng; 368 | GO 369 | /*Result 370 | 371 | lng 372 | ------ 373 | German 374 | */ 375 | 376 | --The input type must be NVARCHAR 377 | SELECT SESSION_CONTEXT('language') AS lng; 378 | /*Result 379 | 380 | Msg 8116, Level 16, State 1, Line 194 381 | Argument data type varchar is invalid for argument 1 of session_context function. 382 | */ 383 | 384 | --Even an NCHAR data type cannot be used! 
385 | DECLARE @varname NCHAR(10) = N'language'; 386 | SELECT SESSION_CONTEXT(@varname) AS lng; 387 | GO 388 | /*Result 389 | 390 | Msg 8116, Level 16, State 1, Line 203 391 | Argument data type nchar is invalid for argument 1 of session_context function. 392 | */ 393 | 394 | --It must be an NVARCHAR data type 395 | DECLARE @varname NVARCHAR(10) = N'language'; 396 | SELECT SESSION_CONTEXT(@varname) AS lng; 397 | GO 398 | /*Result 399 | 400 | lng 401 | -------- 402 | German 403 | */ 404 | 405 | 406 | ---------------------------------------------------- 407 | -- Functions (DATEDIFF_BIG) 408 | ---------------------------------------------------- 409 | 410 | --Get difference between 1st January 1948 and 1st January 2016 in seconds 411 | SELECT DATEDIFF(SECOND,'19480101','20160101') AS diff; 412 | /*Result 413 | 414 | diff 415 | ----------- 416 | 2145916800 417 | */ 418 | 419 | --Get difference between 1st January 1947 and 1st January 2016 in seconds 420 | SELECT DATEDIFF(SECOND,'19470101','20160101') AS diff; 421 | /*Result 422 | 423 | Msg 535, Level 16, State 0, Line 233 424 | The datediff function resulted in an overflow. The number of dateparts separating two date/time instances is too large. Try to use datediff with a less precise datepart. 425 | */ 426 | 427 | --Get difference between 1st January 1947 and 1st January 2016 with the function DATEDIFF_BIG 428 | SELECT DATEDIFF_BIG(SECOND,'19470101','20160101') AS diff; 429 | /*Result 430 | 431 | diff 432 | -------------------- 433 | 2177452800 434 | 435 | */ 436 | 437 | --Get difference between min and max date supported by the data type DATETIME2 in microseconds 438 | SELECT DATEDIFF_BIG(MICROSECOND,'010101','99991231 23:59:59.999999999') AS diff; 439 | /*Result 440 | 441 | diff 442 | -------------------- 443 | 252423993599999999 444 | */ 445 | 446 | --Get difference between min and max date supported by the data type DATETIME2 in nanoseconds 447 | SELECT DATEDIFF_BIG(NANOSECOND,'010101','99991231 23:59:59.999999999') AS diff; 448 | /*Result 449 | 450 | Msg 535, Level 16, State 0, Line 255 451 | The datediff_big function resulted in an overflow. The number of dateparts separating two date/time instances is too large. Try to use datediff_big with a less precise datepart. 
452 | Even with DATEDIFF_BIG an overflow is possible, but the query is anyway a non-sense 453 | */ 454 | 455 | ---------------------------------------------------- 456 | -- Functions (AT TIME ZONE) 457 | ---------------------------------------------------- 458 | SELECT 459 | SYSDATETIMEOFFSET() AS UTCTime, 460 | SYSDATETIMEOFFSET() AT TIME ZONE 'Eastern Standard Time' AS NewYork, 461 | SYSDATETIMEOFFSET() AT TIME ZONE 'Central European Standard Time' AS Vienna, 462 | SYSDATETIMEOFFSET() AT TIME ZONE 'Russian Standard Time' AS Moscow; 463 | /*Result 464 | 465 | 2016-06-18 11:56:51.4736992 +01:00 2016-06-18 06:56:51.4736992 -04:00 2016-06-18 12:56:51.4736992 +02:00 2016-06-18 13:56:51.4736992 +03:00 466 | */ 467 | 468 | SELECT 469 | CONVERT(DATETIME, SYSDATETIMEOFFSET()) AS UTCTime, 470 | CONVERT(DATETIME, SYSDATETIMEOFFSET() AT TIME ZONE 'Eastern Standard Time') AS NewYork_LocalTime, 471 | CONVERT(DATETIME, SYSDATETIMEOFFSET() AT TIME ZONE 'Central European Standard Time') AS Vienna_LocalTime, 472 | CONVERT(DATETIME, SYSDATETIMEOFFSET() AT TIME ZONE 'Russian Standard Time') AS Moscow_LocalTime; 473 | /*Result 474 | 475 | UTCTime NewYork Vienna Moscow 476 | ---------------------------------- ---------------------------------- ---------------------------------- ---------------------------------- 477 | 2016-06-18 14:00:17.7658845 +01:00 2016-06-18 09:00:17.7658845 -04:00 2016-06-18 15:00:17.7658845 +02:00 2016-06-18 16:00:17.7658845 +03:00 478 | */ 479 | 480 | --The following code displays time in four different time zones 481 | SELECT name, CONVERT(DATETIME, SYSDATETIMEOFFSET() AT TIME ZONE name) AS local_time 482 | FROM sys.time_zone_info 483 | WHERE name IN (SELECT value FROM STRING_SPLIT('UTC,Eastern Standard Time,Central European Standard Time,Russian Standard Time',',')); 484 | /*Result 485 | 486 | name local_time 487 | --------------------------------- ----------------------- 488 | Eastern Standard Time 2016-06-18 11:27:50.193 489 | UTC 2016-06-18 15:27:50.193 490 | Central European Standard Time 2016-06-18 17:27:50.193 491 | Russian Standard Time 2016-06-18 18:27:50.193 492 | */ 493 | 494 | --The values supported for time zone can be found in a new system catalog sys.time_zone_info 495 | SELECT * FROM sys.time_zone_info; 496 | /*Result (109 entries on my machine and in abbreviated form, you might get slightly different results) 497 | name current_utc_offset is_currently_dst 498 | -------------------------------------- ------------------ ---------------- 499 | Dateline Standard Time -12:00 0 500 | UTC-11 -11:00 0 501 | Hawaiian Standard Time -10:00 0 502 | Alaskan Standard Time -08:00 1 503 | Pacific Standard Time (Mexico) -07:00 1 504 | Pacific Standard Time -07:00 1 505 | US Mountain Standard Time -07:00 0 506 | ... 507 | Greenwich Standard Time +00:00 0 508 | W. Europe Standard Time +02:00 1 509 | Central Europe Standard Time +02:00 1 510 | Romance Standard Time +02:00 1 511 | Central European Standard Time +02:00 1 512 | W. Central Africa Standard Time +01:00 0 513 | ... 514 | Tonga Standard Time +13:00 0 515 | Samoa Standard Time +13:00 0 516 | Line Islands Standard Time +14:00 0 517 | */ 518 | 519 | --What time is in Seattle, when a clock in Vienna shows 22:33 today (15th August 2016)? 
Here is the answer: 520 | SELECT CAST('20160815 22:33' AS DATETIME) 521 | AT TIME ZONE 'Central European Standard Time' 522 | AT TIME ZONE 'Pacific Standard Time' AS seattle_time; 523 | /*Result 524 | seattle_time 525 | ------------------------------------- 526 | 2016-08-15 13:33:00.000 -07:00 527 | */ 528 | 529 | ---------------------------------------------------- 530 | -- Functions (HASHBYTES) 531 | ---------------------------------------------------- 532 | 533 | --You should run this query on SQL Server 2014 or an earlier version 534 | USE AdventureWorks2014; 535 | SELECT HASHBYTES('SHA2_256',(SELECT TOP (6) * FROM Sales.SalesOrderHeader FOR XML AUTO)) AS hashed_value; 536 | /*Result 537 | 538 | hashed_value 539 | ------------------------------------------------------------------ 540 | 0x26C8A739DB7BE2B27BCE757105E159647F70E02F45E56C563BBC3669BEF49AAF 541 | */ 542 | 543 | SELECT HASHBYTES('SHA2_256',(SELECT TOP (7) * FROM Sales.SalesOrderHeader FOR XML AUTO)) AS hashed_value; 544 | /*Result 545 | 546 | Msg 8152, Level 16, State 10, Line 19 547 | String or binary data would be truncated 548 | */ 549 | 550 | 551 | --the same query in SQL Server 2016 (even in the old compatibilty mode) 552 | USE AdventureWorks2016CTP3; 553 | SELECT HASHBYTES('SHA2_256',(SELECT TOP (7) * FROM Sales.SalesOrderHeader FOR XML AUTO)) AS hashed_value; 554 | /*Result 555 | 556 | hashed_value 557 | ------------------------------------------------------------------ 558 | 0x864E9FE792E0E99165B46F43DB43E659CDAD56F80369FD6D2C58AD2E8386CBF3 559 | */ 560 | 561 | --In SQL Server 2016 you can hash the whole table 562 | SELECT HASHBYTES('SHA2_256',(SELECT * FROM Sales.SalesOrderHeader FOR XML AUTO)) AS hashed_value; 563 | /*Result 564 | 565 | hashed_value 566 | ------------------------------------------------------------------ 567 | 0x2930C226E613EC838F88D821203221344BA93701D39A72813ABC7C936A8BEACA 568 | */ 569 | 570 | --Check hash value for product data mart in the AdventureWorks2016CTP3 database 571 | --You can calculate hash value for the whole data mart and compare it with the value on another instance 572 | --Useful for relativ static tables to check if something related to them has been changed 573 | USE AdventureWorks2016CTP3; 574 | SELECT HASHBYTES('SHA2_256',(SELECT * 575 | FROM 576 | Production.Product p 577 | INNER JOIN Production.ProductSubcategory sc ON p.ProductSubcategoryID = sc.ProductSubcategoryID 578 | INNER JOIN Production.ProductCategory c ON sc.ProductCategoryID = c.ProductCategoryID 579 | INNER JOIN Production.ProductListPriceHistory ph ON ph.ProductID = p.ProductID 580 | FOR XML AUTO)) AS hashed_value; 581 | /*Result 582 | 583 | hashed_value 584 | ------------------------------------------------------------------ 585 | 0xAFC05E912DC6742B085AFCC2619F158B823B4FE53ED1ABD500B017D7A899D99D 586 | */ 587 | 588 | -------------------------------------------------------------------------------- /Chapter04/02 DropIfExists.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter04/02 DropIfExists.sql -------------------------------------------------------------------------------- /Chapter04/03 AlterColumnOnline.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter04/03 AlterColumnOnline.sql 
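The DropIfExists and AlterColumnOnline scripts above are linked rather than inlined. Purely as a hedged illustration (not the book's code), the sketch below shows the two Chapter 4 features those file names refer to, using an invented throw-away table name (dbo.DemoTable) in the WideWorldImporters sample database already used by the other Chapter 4 scripts:

USE WideWorldImporters;
GO
--DROP ... IF EXISTS removes the object only when it exists; no IF OBJECT_ID() guard is needed
DROP TABLE IF EXISTS dbo.DemoTable;
CREATE TABLE dbo.DemoTable
(
    id INT NOT NULL CONSTRAINT PK_DemoTable PRIMARY KEY,
    descr NVARCHAR(100) NULL
);
GO
--SQL Server 2016 can run ALTER COLUMN as an online operation
ALTER TABLE dbo.DemoTable ALTER COLUMN descr NVARCHAR(200) NULL WITH (ONLINE = ON);
GO
--Cleanup
DROP TABLE IF EXISTS dbo.DemoTable;
GO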
-------------------------------------------------------------------------------- /Chapter04/04 TruncateTable.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter04/04 TruncateTable.sql -------------------------------------------------------------------------------- /Chapter04/05 IndexKeySizeLimit.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter04/05 IndexKeySizeLimit.sql -------------------------------------------------------------------------------- /Chapter04/06 QueryHints.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter04/06 QueryHints.sql -------------------------------------------------------------------------------- /Chapter05/01 FORJSON.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/01 FORJSON.sql -------------------------------------------------------------------------------- /Chapter05/02 OPENJSON.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/02 OPENJSON.sql -------------------------------------------------------------------------------- /Chapter05/03 ISSON.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/03 ISSON.sql -------------------------------------------------------------------------------- /Chapter05/04 JSON_VALUE.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/04 JSON_VALUE.sql -------------------------------------------------------------------------------- /Chapter05/05 JSON_QUERY.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/05 JSON_QUERY.sql -------------------------------------------------------------------------------- /Chapter05/06 MODIFY.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/06 MODIFY.sql -------------------------------------------------------------------------------- /Chapter05/07 JSON Performance.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/07 JSON Performance.sql -------------------------------------------------------------------------------- /Chapter05/app.people.json: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter05/app.people.json -------------------------------------------------------------------------------- /Chapter06/Stretch Databases.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter06/Stretch Databases.sql -------------------------------------------------------------------------------- /Chapter07/01 ApplicationTime.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter07/01 ApplicationTime.sql -------------------------------------------------------------------------------- /Chapter07/02 System Versioned Tables.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter07/02 System Versioned Tables.sql -------------------------------------------------------------------------------- /Chapter07/03 SCD.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter07/03 SCD.sql -------------------------------------------------------------------------------- /Chapter08/AEDemo.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo.exe -------------------------------------------------------------------------------- /Chapter08/AEDemo/.vs/AEDemo/v14/.suo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/.vs/AEDemo/v14/.suo -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 14 4 | VisualStudioVersion = 14.0.25420.1 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AEDemo", "AEDemo\AEDemo.csproj", "{C6DF3641-14F6-478D-A2B7-19E70FD2038E}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {C6DF3641-14F6-478D-A2B7-19E70FD2038E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {C6DF3641-14F6-478D-A2B7-19E70FD2038E}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {C6DF3641-14F6-478D-A2B7-19E70FD2038E}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {C6DF3641-14F6-478D-A2B7-19E70FD2038E}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | 
EndGlobal 23 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/AEDemo.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | Debug 6 | AnyCPU 7 | {C6DF3641-14F6-478D-A2B7-19E70FD2038E} 8 | Exe 9 | Properties 10 | AEDemo 11 | AEDemo 12 | v4.5.2 13 | 512 14 | true 15 | 16 | 17 | AnyCPU 18 | true 19 | full 20 | false 21 | bin\Debug\ 22 | DEBUG;TRACE 23 | prompt 24 | 4 25 | 26 | 27 | AnyCPU 28 | pdbonly 29 | true 30 | bin\Release\ 31 | TRACE 32 | prompt 33 | 4 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 60 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/App.config: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/Program.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Data; 4 | using System.Data.SqlClient; 5 | using System.Linq; 6 | using System.Text; 7 | using System.Threading.Tasks; 8 | 9 | namespace AEDemo 10 | { 11 | class Program 12 | { 13 | static void Main(string[] args) 14 | { 15 | string connectionString = "Data Source=localhost; Initial Catalog=AEDemo; Integrated Security=true; Column Encryption Setting=enabled"; 16 | SqlConnection connection = new SqlConnection(connectionString); 17 | connection.Open(); 18 | 19 | if (args.Length != 3) 20 | { 21 | Console.WriteLine("Please enter a numeric and two string arguments."); 22 | return; 23 | } 24 | int id = Int32.Parse(args[0]); 25 | 26 | { 27 | using (SqlCommand cmd = connection.CreateCommand()) 28 | { 29 | cmd.CommandText = @"INSERT INTO dbo.Table1 (id, SecretDeterministic, SecretRandomized)" + 30 | " VALUES (@id, @SecretDeterministic, @SecretRandomized);"; 31 | 32 | SqlParameter paramid= cmd.CreateParameter(); 33 | paramid.ParameterName = @"@id"; 34 | paramid.DbType = DbType.Int32; 35 | paramid.Direction = ParameterDirection.Input; 36 | paramid.Value = id; 37 | cmd.Parameters.Add(paramid); 38 | 39 | SqlParameter paramSecretDeterministic = cmd.CreateParameter(); 40 | paramSecretDeterministic.ParameterName = @"@SecretDeterministic"; 41 | paramSecretDeterministic.DbType = DbType.String; 42 | paramSecretDeterministic.Direction = ParameterDirection.Input; 43 | paramSecretDeterministic.Value = args[1]; 44 | paramSecretDeterministic.Size = 10; 45 | cmd.Parameters.Add(paramSecretDeterministic); 46 | 47 | SqlParameter paramSecretRandomized = cmd.CreateParameter(); 48 | paramSecretRandomized.ParameterName = @"@SecretRandomized"; 49 | paramSecretRandomized.DbType = DbType.String; 50 | paramSecretRandomized.Direction = ParameterDirection.Input; 51 | paramSecretRandomized.Value = args[2]; 52 | paramSecretRandomized.Size = 10; 53 | cmd.Parameters.Add(paramSecretRandomized); 54 | 55 | cmd.ExecuteNonQuery(); 56 | } 57 | } 58 | connection.Close(); 59 | Console.WriteLine("Row inserted successfully"); 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/Properties/AssemblyInfo.cs: -------------------------------------------------------------------------------- 1 | using System.Reflection; 2 | using System.Runtime.CompilerServices; 3 | using System.Runtime.InteropServices; 4 | 5 | // 
General Information about an assembly is controlled through the following 6 | // set of attributes. Change these attribute values to modify the information 7 | // associated with an assembly. 8 | [assembly: AssemblyTitle("AEDemo")] 9 | [assembly: AssemblyDescription("")] 10 | [assembly: AssemblyConfiguration("")] 11 | [assembly: AssemblyCompany("")] 12 | [assembly: AssemblyProduct("AEDemo")] 13 | [assembly: AssemblyCopyright("Copyright © 2016")] 14 | [assembly: AssemblyTrademark("")] 15 | [assembly: AssemblyCulture("")] 16 | 17 | // Setting ComVisible to false makes the types in this assembly not visible 18 | // to COM components. If you need to access a type in this assembly from 19 | // COM, set the ComVisible attribute to true on that type. 20 | [assembly: ComVisible(false)] 21 | 22 | // The following GUID is for the ID of the typelib if this project is exposed to COM 23 | [assembly: Guid("c6df3641-14f6-478d-a2b7-19e70fd2038e")] 24 | 25 | // Version information for an assembly consists of the following four values: 26 | // 27 | // Major Version 28 | // Minor Version 29 | // Build Number 30 | // Revision 31 | // 32 | // You can specify all the values or you can default the Build and Revision Numbers 33 | // by using the '*' as shown below: 34 | // [assembly: AssemblyVersion("1.0.*")] 35 | [assembly: AssemblyVersion("1.0.0.0")] 36 | [assembly: AssemblyFileVersion("1.0.0.0")] 37 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.exe -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.exe.config: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.pdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.pdb -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.vshost.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.vshost.exe -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.vshost.exe.config: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/bin/Debug/AEDemo.vshost.exe.manifest: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/AEDemo.csproj.FileListAbsolute.txt: -------------------------------------------------------------------------------- 1 | 
C:\SQL2016DevGuide\AEDemo\AEDemo\bin\Debug\AEDemo.exe.config 2 | C:\SQL2016DevGuide\AEDemo\AEDemo\bin\Debug\AEDemo.exe 3 | C:\SQL2016DevGuide\AEDemo\AEDemo\bin\Debug\AEDemo.pdb 4 | C:\SQL2016DevGuide\AEDemo\AEDemo\obj\Debug\AEDemo.csprojResolveAssemblyReference.cache 5 | C:\SQL2016DevGuide\AEDemo\AEDemo\obj\Debug\AEDemo.exe 6 | C:\SQL2016DevGuide\AEDemo\AEDemo\obj\Debug\AEDemo.pdb 7 | -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/AEDemo.csprojResolveAssemblyReference.cache: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/obj/Debug/AEDemo.csprojResolveAssemblyReference.cache -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/AEDemo.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/obj/Debug/AEDemo.exe -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/AEDemo.pdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/obj/Debug/AEDemo.pdb -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/DesignTimeResolveAssemblyReferencesInput.cache: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/obj/Debug/DesignTimeResolveAssemblyReferencesInput.cache -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/obj/Debug/TemporaryGeneratedFile_036C0B5B-1481-4323-8D20-8F5ADCB23D92.cs -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/obj/Debug/TemporaryGeneratedFile_5937a670-0e60-4077-877b-f7221da3dda1.cs -------------------------------------------------------------------------------- /Chapter08/AEDemo/AEDemo/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/AEDemo/AEDemo/obj/Debug/TemporaryGeneratedFile_E7A71F73-0F8D-4B9B-B56E-8E70B10BC5D3.cs 
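The AEDemo client above (Program.cs) connects with Column Encryption Setting=enabled and inserts into dbo.Table1, whose SecretDeterministic and SecretRandomized columns are protected by Always Encrypted; the table and key definitions live in the linked Chapter08AE1.sql and Chapter08AE2.sql scripts below. As a hedged companion sketch only (an editor's addition, not part of the book's scripts), the following query lists which columns of that table are encrypted, with which encryption type and column encryption key, once those scripts have been run:

--Assumes the AEDemo database and dbo.Table1 from the linked Chapter08 Always Encrypted scripts exist
USE AEDemo;
GO
SELECT c.name AS column_name,
       c.encryption_type_desc,
       c.encryption_algorithm_name,
       cek.name AS column_encryption_key_name
FROM sys.columns AS c
INNER JOIN sys.column_encryption_keys AS cek
    ON c.column_encryption_key_id = cek.column_encryption_key_id
WHERE OBJECT_NAME(c.object_id) = N'Table1';
GO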
-------------------------------------------------------------------------------- /Chapter08/Chapter08.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/Chapter08.sql -------------------------------------------------------------------------------- /Chapter08/Chapter08AE1.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/Chapter08AE1.sql -------------------------------------------------------------------------------- /Chapter08/Chapter08AE2.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/Chapter08AE2.sql -------------------------------------------------------------------------------- /Chapter08/Chapter08DDM.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/Chapter08DDM.sql -------------------------------------------------------------------------------- /Chapter08/Chapter08RLS.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/Chapter08RLS.sql -------------------------------------------------------------------------------- /Chapter08/Chapter08TDE.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter08/Chapter08TDE.sql -------------------------------------------------------------------------------- /Chapter09/QueryStore.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter09/QueryStore.sql -------------------------------------------------------------------------------- /Chapter10/Chapter10.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter10/Chapter10.sql -------------------------------------------------------------------------------- /Chapter11/B05694_11_01.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter11/B05694_11_01.sql -------------------------------------------------------------------------------- /Chapter12/B05694_12_01.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter12/B05694_12_01.sql -------------------------------------------------------------------------------- /Chapter13/Chapter13.R: 
-------------------------------------------------------------------------------- 1 | # ---------------------------------------------------- 2 | # -------- SQL Server 2016 Developer's Guide -------- 3 | # ----- Chapter 13 - Supporting R in SQL Server ----- 4 | # ---------------------------------------------------- 5 | 6 | # ---------------------------------------------------- 7 | # -- Section 1: Introducing R 8 | # ---------------------------------------------------- 9 | 10 | 11 | # R Contributors 12 | contributors(); 13 | 14 | # If you want to quit 15 | q(); 16 | 17 | # Getting help on help 18 | help(); 19 | # General help 20 | help.start(); 21 | # Help about global options 22 | help("options"); 23 | # Help on the function exp() 24 | help("exp"); 25 | ?"exp"; 26 | # Examples for the function exp() 27 | example("exp"); 28 | # Search 29 | help.search("constants"); 30 | ??"constants"; 31 | # Online search 32 | RSiteSearch("exp"); 33 | 34 | # Demonsstrate graphics capabilities 35 | demo("graphics"); 36 | 37 | # Pie chart example 38 | pie.sales <- c(0.12, 0.3, 0.26, 0.16, 0.04, 0.12); 39 | names(pie.sales) <- c("Blueberry", "Cherry", "Apple", 40 | "Boston Cream", "Other", "Vanilla Cream"); 41 | pie(pie.sales, 42 | col = c("purple","violetred1","green3","cornsilk","cyan","white")); 43 | 44 | title(main = "January Pie Sales", cex.main = 1.8, font.main = 1); 45 | title(xlab = "(Don't try this at home kids)", cex.lab = 0.8, font.lab = 3); 46 | 47 | # List of the current objects in the workspace 48 | objects(); 49 | ls(); 50 | 51 | sink("C:\\SQL2016DevGuide\\Ch13.txt") 52 | dev.off; 53 | sink() 54 | 55 | # Basic expressions 56 | 1 + 1; 57 | 2 + 3 * 4; 58 | 3 ^ 3; 59 | sqrt(81); 60 | pi; 61 | 62 | # Check the built-in constants 63 | ??"constants"; 64 | 65 | # Sequences 66 | rep(1,10); 67 | 3:7; 68 | seq(3,7); 69 | seq(5,17,by=3); 70 | 71 | 72 | # Variables 73 | x <- 2; 74 | y <- 3; 75 | z <- 4; 76 | x + y * z; 77 | # Names are case-sensitive 78 | X + Y + Z; 79 | # Can use period 80 | This.Year <- 2016; 81 | This.Year; 82 | # Equals as an assigment operator 83 | x = 2; 84 | y = 3; 85 | z = 4; 86 | x + y * z; 87 | # Boolean equality test 88 | x <- 2; 89 | x == 2; 90 | 91 | 92 | # Vectors 93 | x <- c(2,0,0,4); 94 | assign("y", c(1,9,9,9)); 95 | c(5,4,3,2) -> z; 96 | q = c(1,2,3,4); 97 | # Vector operations 98 | x + y; 99 | x * 4; 100 | sqrt(x); 101 | 102 | # Vector elements 103 | x <- c(2,0,0,4); 104 | x[1]; # Select the first element 105 | x[-1]; # Exclude the first element 106 | x[1] <- 3; x; # Assign a value to the first element 107 | x[-1] = 5; x; # Assign a value to all other elements 108 | y <- c(1,9,9,9); 109 | y < 8; # Compares each element, returns result as vector 110 | y[4] = 1; 111 | y < 8; 112 | y[y<8] = 2; y # Edits elements marked as TRUE in index vector 113 | 114 | # Check the installed packages 115 | installed.packages(); 116 | # Library location 117 | .libPaths(); 118 | library(); 119 | 120 | # Reading from SQL Server 121 | # Install RODBC library 122 | install.packages("RODBC"); 123 | # Load RODBC library 124 | library(RODBC); 125 | # Getting help about RODBC 126 | help(package = "RODBC"); 127 | 128 | # Connect to WWIDW 129 | # WWIDW system DSN created in advance 130 | con <- odbcConnect("WWIDW", uid="RUser", pwd="Pa$$w0rd"); 131 | sqlQuery(con, 132 | "SELECT c.Customer, 133 | SUM(f.Quantity) AS TotalQuantity, 134 | SUM(f.[Total Excluding Tax]) AS TotalAmount, 135 | COUNT(*) AS SalesCount 136 | FROM Fact.Sale AS f 137 | INNER JOIN Dimension.Customer AS c 138 | ON f.[Customer Key] = 
c.[Customer Key] 139 | WHERE c.[Customer Key] <> 0 140 | GROUP BY c.Customer 141 | HAVING COUNT(*) > 400 142 | ORDER BY SalesCount DESC;"); 143 | 144 | 145 | # ---------------------------------------------------- 146 | # -- Section 2: Manipulating data 147 | # ---------------------------------------------------- 148 | 149 | 150 | # Matrix 151 | x = c(1,2,3,4,5,6); x; # A simple vector 152 | Y = array(x, dim=c(2,3)); Y; # A matrix from the vector - fill by columns 153 | Z = matrix(x,2,3,byrow=F); Z; # A matrix from the vector - fill by columns 154 | U = matrix(x,2,3,byrow=T); U; # A matrix from the vector - fill by rows 155 | rnames = c("Row1", "Row2"); 156 | cnames = c("Col1", "Col2", "Col3"); 157 | V = matrix(x,2,3,byrow=T, dimnames = list(rnames, cnames)); V; # names 158 | 159 | # Elements of a matrix 160 | U[1,]; 161 | U[1,c(2,3)]; 162 | U[,c(2,3)]; 163 | V[,c("Col2", "Col3")]; 164 | 165 | # Array 166 | rnames = c("Row1", "Row2"); 167 | cnames = c("Col1", "Col2", "Col3"); 168 | pnames = c("Page1", "Page2", "Page3"); 169 | Y = array(1:18, dim=c(2,3,3), dimnames = list(rnames, cnames, pnames)); Y; 170 | 171 | 172 | # Factor 173 | x = c("good", "moderate", "good", "bad", "bad", "good"); 174 | y = factor(x); y; 175 | z = factor(x, order=TRUE); z; 176 | w = factor(x, order=TRUE, 177 | levels=c("bad", "moderate","good")); w; 178 | 179 | # List 180 | L = list(name1="ABC", name2="DEF", 181 | no.children=2, children.ages=c(3,6)); 182 | L; 183 | L[[1]]; 184 | L[[4]]; 185 | L[[4]][2]; 186 | 187 | # Data frame 188 | CategoryId = c(1,2,3,4); 189 | CategoryName = c("Bikes", "Components", "Clothing", "Accessories"); 190 | ProductCategories = data.frame(CategoryId, CategoryName); 191 | ProductCategories; 192 | 193 | # Reading a data frame from a CSV file 194 | TM = read.table("C:\\SQL2016DevGuide\\Chapter13_TM.csv", 195 | sep=",", header=TRUE, row.names = "CustomerKey", 196 | stringsAsFactors = TRUE); 197 | TM[1:5,1:4]; 198 | 199 | # Accessing data in a data frame 200 | TM[1:2]; # Two columns 201 | TM[c("MaritalStatus", "Gender")]; # Two columns 202 | TM[1:3,1:2]; # Three rows, two columns 203 | TM[1:3,c("MaritalStatus", "Gender")]; # Three rows, two columns 204 | 205 | # $ Notation 206 | table(TM$MaritalStatus, TM$Gender); 207 | attach(TM); 208 | table(MaritalStatus, Gender); 209 | detach(TM); 210 | with(TM, 211 | {table(MaritalStatus, Gender)}); 212 | 213 | 214 | # Value labels 215 | table(TM$BikeBuyer, TM$Gender); 216 | TM$BikeBuyer <- factor(TM$BikeBuyer, 217 | levels = c(0,1), 218 | labels = c("No","Yes")); 219 | table(TM$BikeBuyer, TM$Gender); 220 | 221 | # Metadata 222 | class(TM); 223 | names(TM); 224 | length(TM); 225 | dim(TM); 226 | str(TM); 227 | 228 | # Recoding and adding variables 229 | TM <- within(TM, { 230 | MaritalStatusInt <- NA 231 | MaritalStatusInt[MaritalStatus == "S"] <- 0 232 | MaritalStatusInt[MaritalStatus == "M"] <- 1 233 | }); 234 | str(TM); 235 | 236 | # Changing the data type 237 | TM$MaritalStatusInt <- as.integer(TM$MaritalStatusInt); 238 | str(TM); 239 | 240 | # Adding another variable 241 | TM$HouseholdNumber = as.integer( 242 | 1 + TM$MaritalStatusInt + TM$NumberChildrenAtHome); 243 | str(TM); 244 | 245 | # Missing values 246 | x <- c(1,2,3,4,5,NA); 247 | is.na(x); 248 | mean(x); 249 | mean(x, na.rm = TRUE); 250 | 251 | # Projection datasets 252 | # Re-read the TM dataset without row.names 253 | TM = read.table("C:\\SQL2016DevGuide\\Chapter13_TM.csv", 254 | sep=",", header=TRUE, 255 | stringsAsFactors = TRUE); 256 | TM[1:3,1:3]; 257 | cols1 <- c("CustomerKey", 
"MaritalStatus"); 258 | TM1 <- TM[cols1]; 259 | cols2 <- c("CustomerKey", "Gender"); 260 | TM2 <- TM[cols2]; 261 | TM1[1:3, 1:2]; 262 | TM2[1:3, 1:2]; 263 | 264 | # Merge datasets 265 | TM3 <- merge(TM1, TM2, by = "CustomerKey"); 266 | TM3[1:3, 1:3]; 267 | 268 | # Binding datasets 269 | TM4 <- cbind(TM1, TM2); 270 | TM4[1:3, 1:4]; 271 | 272 | # Filtering and row binding data 273 | TM1 <- TM[TM$CustomerKey < 11002, cols1]; 274 | TM2 <- TM[TM$CustomerKey > 29481, cols1]; 275 | TM5 <- rbind(TM1, TM2); 276 | TM5; 277 | 278 | # Sort 279 | TMSortedByAge <- TM[order(-TM$Age),c("CustomerKey", "Age")]; 280 | TMSortedByAge[1:5,1:2]; 281 | 282 | 283 | # ---------------------------------------------------- 284 | # -- Section 3: Understanding the data 285 | # ---------------------------------------------------- 286 | 287 | # Re-read the TM dataset 288 | TM = read.table("C:\\SQL2016DevGuide\\Chapter13_TM.csv", 289 | sep=",", header=TRUE, 290 | stringsAsFactors = TRUE); 291 | attach(TM); 292 | 293 | # A simple distribution 294 | plot(Education); 295 | 296 | # Education is ordered 297 | Education = factor(Education, order=TRUE, 298 | levels=c("Partial High School", 299 | "High School","Partial College", 300 | "Bachelors", "Graduate Degree")); 301 | plot(Education, main = 'Education', 302 | xlab='Education', ylab ='Number of Cases', 303 | col="purple"); 304 | 305 | 306 | # Generating a subset data frame 307 | cols1 <- c("CustomerKey", "NumberCarsOwned", "TotalChildren"); 308 | TM1 <- TM[TM$CustomerKey < 11010, cols1]; 309 | names(TM1) <- c("CustomerKey1", "NumberCarsOwned1", "TotalChildren1"); 310 | attach(TM1); 311 | 312 | # Generating a table from NumberCarsOwned and BikeBuyer 313 | nofcases <- table(NumberCarsOwned, BikeBuyer); 314 | nofcases; 315 | 316 | # Saving parameters 317 | oldpar <- par(no.readonly = TRUE); 318 | 319 | # Defining a 2x2 graph 320 | par(mfrow=c(2,2)); 321 | 322 | # Education and marital status 323 | plot(Education, MaritalStatus, 324 | main='Education and marital status', 325 | xlab='Education', ylab ='Marital Status', 326 | col=c("blue", "yellow")); 327 | 328 | # Histogram with a title and axis labels and color 329 | hist(NumberCarsOwned, main = 'Number of cars owned', 330 | xlab='Number of Cars Owned', ylab ='Number of Cases', 331 | col="blue"); 332 | 333 | # Plot with two lines, title, legend, and axis legends 334 | plot_colors=c("blue", "red"); 335 | plot(TotalChildren1, 336 | type="o",col='blue', lwd=2, 337 | xlab="Key",ylab="Number"); 338 | lines(NumberCarsOwned1, 339 | type="o",col='red', lwd=2); 340 | legend("topleft", 341 | c("TotalChildren", "NumberCarsOwned"), 342 | cex=1.4,col=plot_colors,lty=1:2,lwd=1, bty="n"); 343 | title(main="Total children and number of cars owned line chart", 344 | col.main="DarkGreen", font.main=4); 345 | 346 | # NumberCarsOwned and BikeBuyer grouped bars 347 | barplot(nofcases, 348 | main='Number of cars owned and bike buyer gruped', 349 | xlab='BikeBuyer', ylab ='NumberCarsOwned', 350 | legend=rownames(nofcases), 351 | col=c("black", "blue", "red", "orange", "yellow"), 352 | beside=TRUE); 353 | 354 | # Restoring the default graphical parameters 355 | par(oldpar); 356 | 357 | # removing the data frames from the search path 358 | detach(TM); 359 | detach(TM1); 360 | 361 | 362 | # Descriptive statistics 363 | # Re-read the TM dataset 364 | TM = read.table("C:\\SQL2016DevGuide\\Chapter13_TM.csv", 365 | sep=",", header=TRUE, 366 | stringsAsFactors = TRUE); 367 | attach(TM); 368 | # Education is ordered 369 | Education = factor(Education, 
order=TRUE, 370 | levels=c("Partial High School", 371 | "High School","Partial College", 372 | "Bachelors", "Graduate Degree")); 373 | 374 | # A quick summary for the whole dataset 375 | summary(TM); 376 | 377 | # A quick summary for Age 378 | summary(Age); 379 | # Details for Age 380 | mean(Age); 381 | median(Age); 382 | min(Age); 383 | max(Age); 384 | range(Age); 385 | quantile(Age, 1/4); 386 | quantile(Age, 3/4); 387 | IQR(Age); 388 | var(Age); 389 | sd(Age); 390 | 391 | # Skewness and kurtosis - package moments 392 | install.packages("moments"); 393 | library(moments); 394 | skewness(Age); 395 | kurtosis(Age); 396 | 397 | # Custom function for skewness and kurtosis 398 | skewkurt <- function(p){ 399 | avg <- mean(p) 400 | cnt <- length(p) 401 | stdev <- sd(p) 402 | skew <- sum((p-avg)^3/stdev^3)/cnt 403 | kurt <- sum((p-avg)^4/stdev^4)/cnt-3 404 | return(c(skewness=skew, kurtosis=kurt)) 405 | }; 406 | skewkurt(Age); 407 | 408 | # Frequencies 409 | # Summary gives absolute frequencies only 410 | summary(Education); 411 | # table and prop.table 412 | edt <- table(Education); 413 | edt; 414 | prop.table(edt); 415 | 416 | # Package descr 417 | install.packages("descr"); 418 | library(descr); 419 | freq(Education); 420 | 421 | # Clean up 422 | detach(TM); 423 | 424 | 425 | # ---------------------------------------------------- 426 | # -- Section 4: SQL Server R Services 427 | # ---------------------------------------------------- 428 | 429 | 430 | # Set the execution context to the server 431 | # Define SQL Server connection string 432 | sqlConnStr <- "Driver=SQL Server;Server=localhost; 433 | Database=AdventureWorksDW2014;Uid=RUser;Pwd=Pa$$w0rd"; 434 | # Share to exchange data with SQL Server 435 | sqlShare <- "C:\\SQL2016DevGuide"; 436 | # Define the chunk size 437 | chunkSize = 1000; 438 | # Create a server execution context 439 | srvEx <- RxInSqlServer(connectionString = sqlConnStr, shareDir = sqlShare, 440 | wait = TRUE, consoleOutput = FALSE); 441 | rxSetComputeContext(srvEx); 442 | 443 | # Import the data from a .CSV file 444 | TMCSV = rxImport(inData = "C:\\SQL2016DevGuide\\Chapter13_TM.csv", 445 | stringsAsFactors = TRUE, type = "auto", 446 | rowsPerRead = chunkSize, reportProgress = 3); 447 | 448 | # A query 449 | TMquery <- 450 | "SELECT CustomerKey, MaritalStatus, Gender, 451 | TotalChildren, NumberChildrenAtHome, 452 | EnglishEducation AS Education, 453 | EnglishOccupation AS Occupation, 454 | HouseOwnerFlag, NumberCarsOwned, CommuteDistance, 455 | Region, BikeBuyer, 456 | YearlyIncome, Age 457 | FROM dbo.vTargetMail"; 458 | 459 | # Generate SqlServer data source object 460 | sqlTM <- RxSqlServerData(sqlQuery = TMquery, 461 | connectionString = sqlConnStr, 462 | stringsAsFactors = TRUE, 463 | rowsPerRead = chunkSize); 464 | 465 | # Import data to a data frame 466 | TMSQL <- rxImport(inData = sqlTM, reportProgress = 3); 467 | 468 | 469 | # Info about the SQL data source and the data frames with imported data 470 | rxGetInfo(TMSQL); 471 | rxGetInfo(TMCSV); 472 | rxGetInfo(sqlTM); 473 | 474 | # Info about the variables 475 | rxGetVarInfo(sqlTM); 476 | 477 | # Compute summary statistics 478 | sumOut <- rxSummary( 479 | formula = ~ NumberCarsOwned + Occupation + F(BikeBuyer), 480 | data = sqlTM); 481 | sumOut; 482 | 483 | # Crosstabulation object 484 | cTabs <- rxCrossTabs(formula = BikeBuyer ~ 485 | Occupation : F(HouseOwnerFlag), 486 | data = sqlTM); 487 | # Check the results 488 | print(cTabs, output = "counts"); 489 | print(cTabs, output = "sums"); 490 | print(cTabs, output = 
"means"); 491 | summary(cTabs, output = "sums"); 492 | summary(cTabs, output = "counts"); 493 | summary(cTabs, output = "means"); 494 | 495 | # Crosstabulation in a different way 496 | cCube <- rxCube(formula = BikeBuyer ~ 497 | Occupation : F(HouseOwnerFlag), 498 | data = sqlTM); 499 | # Check the results 500 | cCube; 501 | 502 | # Histogram 503 | rxHistogram(formula = ~ BikeBuyer | MaritalStatus, 504 | data = sqlTM); 505 | 506 | # Set the compute context back to local 507 | rxSetComputeContext("local"); 508 | 509 | 510 | # K-Means Clustering 511 | TwoClust <- rxKmeans(formula = ~ BikeBuyer + TotalChildren + NumberCarsOwned, 512 | data = TMSQL, 513 | numClusters = 2); 514 | summary(TwoClust); 515 | 516 | 517 | # Add cluster membership to the original data frame and rename the variable 518 | TMClust <- cbind(TMSQL, TwoClust$cluster); 519 | names(TMClust)[15] <- "ClusterID"; 520 | 521 | # Attach the new data frame 522 | attach(TMClust); 523 | 524 | # Saving parameters 525 | oldpar <- par(no.readonly = TRUE); 526 | 527 | # Defining a 1x3 graph 528 | par(mfrow=c(1,3)); 529 | 530 | # NumberCarsOwned and clusters 531 | nofcases <- table(NumberCarsOwned, ClusterID); 532 | nofcases; 533 | barplot(nofcases, 534 | main='Number of cars owned and cluster ID', 535 | xlab='Cluster Id', ylab ='Number of Cars', 536 | legend=rownames(nofcases), 537 | col=c("black", "blue", "red", "orange", "yellow"), 538 | beside=TRUE); 539 | # BikeBuyer and clusters 540 | nofcases <- table(BikeBuyer, ClusterID); 541 | nofcases; 542 | barplot(nofcases, 543 | main='Bike buyer and cluster ID', 544 | xlab='Cluster Id', ylab ='BikeBuyer', 545 | legend=rownames(nofcases), 546 | col=c("blue", "yellow"), 547 | beside=TRUE); 548 | # TotalChildren and clusters 549 | nofcases <- table(TotalChildren, ClusterID); 550 | nofcases; 551 | barplot(nofcases, 552 | main='Total children and cluster ID', 553 | xlab='Cluster Id', ylab ='Total Children', 554 | legend=rownames(nofcases), 555 | col=c("black", "blue", "green", "red", "orange", "yellow"), 556 | beside=TRUE); 557 | 558 | # Clean up 559 | par(oldpar); 560 | detach(TMClust); 561 | 562 | 563 | # Create a Logistic Regression model to predict BikeBuyer 564 | # Set compute context back to SQL Server 565 | rxSetComputeContext(srvEx); 566 | # Create the model 567 | bbLogR <- rxLogit(BikeBuyer ~ 568 | NumberCarsOwned + TotalChildren + Age + YearlyIncome, 569 | data = sqlTM); 570 | # See the summary of the model 571 | summary(bbLogR); 572 | 573 | # Prepare a SQL Server table for storing predictions 574 | bbLogRPredict <- RxSqlServerData(connectionString = sqlConnStr, 575 | table = "dbo.TargetMailLogR"); 576 | 577 | # Store the predictions in SQL Server 578 | rxPredict(modelObject = bbLogR, 579 | data = sqlTM, outData = bbLogRPredict, 580 | predVarNames = "BikeBuyerPredict", 581 | type = "response", writeModelVars = TRUE); 582 | 583 | # Store the model in SQL Server 584 | library(RODBC); 585 | conn <- odbcDriverConnect(sqlConnStr); 586 | 587 | # Serialize a model 588 | modelbin <- serialize(bbLogR, NULL); 589 | modelbinstr=paste(modelbin, collapse=""); 590 | 591 | # persist model by calling a stored procedure from SQL Server 592 | sqlQ <- paste("EXEC dbo.InsertModel @modelname='bbLogR', @model='", 593 | modelbinstr,"'", sep=""); 594 | sqlQuery(conn, sqlQ); 595 | 596 | # End of script 597 | 598 | -------------------------------------------------------------------------------- /Chapter13/Chapter13.sql: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/PacktPublishing/SQL-Server-2016-Developers-Guide/675d6287f88e50ab4531d03986678d192bea979f/Chapter13/Chapter13.sql -------------------------------------------------------------------------------- /Chapter13/Code file.txt: -------------------------------------------------------------------------------- 1 | https://onedrive.live.com/?authkey=%21ADNHLY6wQVBpmPs&id=42DD61CD766F0BC0%21442661&cid=42DD61CD766F0BC0 -------------------------------------------------------------------------------- /Chapter14/Chapter14.R: -------------------------------------------------------------------------------- 1 | # -------------------------------------------------------------------------------------- 2 | # -------- SQL Server 2016 Developer's Guide ------------------------------------------ 3 | # ----- Chapter 14 - Data Exploration and Predictive Modeling with R in SQL Server ----- 4 | # -------------------------------------------------------------------------------------- 5 | 6 | # ---------------------------------------------------- 7 | # -- Section 1: Intermediate Statistics - Associations 8 | # ---------------------------------------------------- 9 | 10 | 11 | # Importing Target mail data 12 | # Reading a data frame from a CSV file and attaching it 13 | TM = read.table("C:\\SQL2016DevGuide\\Chapter14_TM.csv", 14 | sep=",", header=TRUE, 15 | stringsAsFactors = TRUE); 16 | attach(TM); 17 | 18 | 19 | # Education is ordered 20 | Education = factor(Education, order=TRUE, 21 | levels=c("Partial High School", 22 | "High School","Partial College", 23 | "Bachelors", "Graduate Degree")); 24 | plot(Education, main = 'Education', 25 | xlab='Education', ylab ='Number of Cases', 26 | col="purple"); 27 | 28 | # Crosstabulation with table() and xtabs() 29 | table(Education, Gender, BikeBuyer); 30 | table(NumberCarsOwned, BikeBuyer); 31 | xtabs(~Education + Gender + BikeBuyer); 32 | xtabs(~NumberCarsOwned + BikeBuyer); 33 | 34 | # Storing tables in objects 35 | tEduGen <- xtabs(~ Education + Gender); 36 | tNcaBik <- xtabs(~ NumberCarsOwned + BikeBuyer); 37 | 38 | # Test of independence 39 | chisq.test(tEduGen); 40 | chisq.test(tNcaBik); 41 | 42 | summary(tEduGen); 43 | summary(tNcaBik); 44 | 45 | # Installing and loading the vcd package 46 | install.packages("vcd"); 47 | library(vcd); 48 | 49 | # Measures of association 50 | assocstats(tEduGen); 51 | assocstats(tNcaBik); 52 | 53 | # Visualizing the crosstabulation 54 | # Showing expected and observed frequencies 55 | strucplot(tNcaBik, residuals = NULL, shade = TRUE, 56 | gp = gpar(fill=c("yellow", "blue")), 57 | type = "expected", main = "Expected"); 58 | strucplot(tNcaBik, residuals = NULL, shade = TRUE, 59 | gp = gpar(fill=c("yellow", "blue")), 60 | type = "observed", main = "Observed"); 61 | 62 | 63 | # Covariance and correlations 64 | 65 | # Pearson 66 | x <- TM[,c("YearlyIncome", "Age", "NumberCarsOwned")]; 67 | cov(x); 68 | cor(x); 69 | 70 | # Spearman 71 | y <- TM[,c("TotalChildren", "NumberChildrenAtHome", "HouseOwnerFlag", "BikeBuyer")]; 72 | cor(y); 73 | cor(y, method = "spearman"); 74 | 75 | # Correlations between two matrices 76 | cor(y,x); 77 | 78 | # Visualizing the correlations 79 | install.packages("corrgram"); 80 | library(corrgram); 81 | corrgram(y, order = TRUE, lower.panel = panel.shade, 82 | upper.panel = panel.shade, text.panel = panel.txt, 83 | cor.method = "spearman", main = "Corrgram"); 84 | 85 | 86 | # Continuous and discrete variables 87 | 88 | # T-test 89 | t.test(YearlyIncome ~ Gender); 90 | t.test(YearlyIncome ~ 
HouseOwnerFlag); 91 | # Error - t-test supports only two groups 92 | t.test(YearlyIncome ~ Education); 93 | 94 | # Visualizing the associations 95 | boxplot(YearlyIncome ~ Gender, 96 | main = "Yearly Income in Groups", 97 | ylab = "Yearly Income", 98 | xlab = "Gender"); 99 | boxplot(YearlyIncome ~ HouseOwnerFlag, 100 | main = "Yearly Income in Groups", 101 | notch = TRUE, 102 | varwidth = TRUE, 103 | col = "orange", 104 | ylab = "Yearly Income", 105 | xlab = "House Owner Flag"); 106 | 107 | 108 | # Don't forget - Education is ordered 109 | Education = factor(Education, order=TRUE, 110 | levels=c("Partial High School", 111 | "High School","Partial College", 112 | "Bachelors", "Graduate Degree")); 113 | # One-way ANOVA 114 | aggregate(YearlyIncome, by = list(Education), FUN = mean); 115 | aggregate(YearlyIncome, by = list(Education), FUN = sd); 116 | AssocTest <- aov(YearlyIncome ~ Education); 117 | summary(AssocTest); 118 | 119 | # Visualizing ANOVA 120 | boxplot(YearlyIncome ~ Education, 121 | main = "Yearly Income in Groups", 122 | notch = TRUE, 123 | varwidth = TRUE, 124 | col = "orange", 125 | ylab = "Yearly Income", 126 | xlab = "Education"); 127 | 128 | # Load gplots 129 | library(gplots); 130 | plotmeans(YearlyIncome ~ Education, 131 | bars = TRUE, p = 0.99, barwidth = 3, 132 | col = "red", lwd = 3, 133 | main = "Yearly Income in Groups", 134 | ylab = "Yearly Income", 135 | xlab = "Education") 136 | 137 | 138 | # A smaller data frame for the purpose of graph 139 | TMLM <- TM[1:100, c("YearlyIncome", "Age")]; 140 | # Removing the TM data frame from the search path 141 | detach(TM); 142 | # Adding the smaller data frame to the search path 143 | attach(TMLM); 144 | 145 | # Plot the data points 146 | plot(Age, YearlyIncome, 147 | cex = 2, col = "orange", lwd = 2); 148 | 149 | # Simple linear regression model 150 | LinReg1 <- lm(YearlyIncome ~ Age); 151 | summary(LinReg1); 152 | 153 | # Polynomial regression 154 | LinReg2 <- lm(YearlyIncome ~ Age + I(Age ^ 2)); 155 | summary(LinReg2); 156 | 157 | # Visualization 158 | plot(Age, YearlyIncome, 159 | cex = 2, col = "orange", lwd = 2); 160 | abline(LinReg1, 161 | col = "red", lwd = 2); 162 | lines(lowess(Age, YearlyIncome), 163 | col = "blue", lwd = 2); 164 | 165 | # Removing the smaller data frame from the search path 166 | detach(TMLM); 167 | 168 | 169 | # ---------------------------------------------------- 170 | # -- Section 2: PCA, EFA, and Clustering - Undirected 171 | # ---------------------------------------------------- 172 | 173 | 174 | # In case it is needed - re-read the TM data 175 | TM = read.table("C:\\SQL2016DevGuide\\Chapter14_TM.csv", 176 | sep=",", header=TRUE, 177 | stringsAsFactors = TRUE); 178 | 179 | # Extracting numerical data only 180 | TMPCAEFA <- TM[, c("TotalChildren", "NumberChildrenAtHome", 181 | "HouseOwnerFlag", "NumberCarsOwned", 182 | "BikeBuyer", "YearlyIncome", "Age")]; 183 | 184 | # PCA from the base installation 185 | pcaBasic <- princomp(TMPCAEFA, cor = TRUE); 186 | summary(pcaBasic); 187 | plot(pcaBasic, main = "PCA Basic", col = "blue"); 188 | 189 | 190 | # Package psych functions used for PCA and EFA 191 | install.packages("psych"); 192 | library(psych); 193 | 194 | # PCA unrotated 195 | pcaTM_unrotated <- principal(TMPCAEFA, nfactors = 2, rotate = "none"); 196 | pcaTM_unrotated; 197 | 198 | # PCA varimax rotation 199 | pcaTM_varimax <- principal(TMPCAEFA, nfactors = 2, rotate = "varimax"); 200 | pcaTM_varimax; 201 | 202 | # Biplots 203 | biplot.psych(pcaTM_unrotated, cex = c(0.1,2), main = "PCA 
Unrotated"); 204 | biplot.psych(pcaTM_varimax, cex = c(0.1,2), main = "PCA Varimax"); 205 | 206 | 207 | # EFA unrotated 208 | efaTM_unrotated <- fa(TMPCAEFA, nfactors = 2, rotate = "none"); 209 | efaTM_unrotated; 210 | 211 | # EFA varimax 212 | efaTM_varimax <- fa(TMPCAEFA, nfactors = 2, rotate = "varimax"); 213 | efaTM_varimax; 214 | 215 | # EFA promax 216 | efaTM_promax <- fa(TMPCAEFA, nfactors = 2, rotate = "promax"); 217 | efaTM_promax; 218 | 219 | # Plots 220 | factor.plot(efaTM_unrotated, 221 | labels = rownames(efaTM_unrotated$loadings), 222 | title = "EFA Unrotated"); 223 | factor.plot(efaTM_varimax, 224 | labels = rownames(efaTM_varimax$loadings), 225 | title = "EFA Varimax"); 226 | factor.plot(efaTM_promax, 227 | labels = rownames(efaTM_promax$loadings), 228 | title = "EFA Promax"); 229 | fa.diagram(efaTM_unrotated, simple = FALSE, 230 | main = "EFA Unrotated"); 231 | fa.diagram(efaTM_varimax, simple = FALSE, 232 | main = "EFA Varimax"); 233 | fa.diagram(efaTM_promax, simple = FALSE, 234 | main = "EFA Promax"); 235 | 236 | 237 | # Clustering 238 | 239 | # Hierarchical clustering 240 | # Subset of the data 241 | TM50 <- TM[sample(1:nrow(TM), 50, replace=FALSE), 242 | c("TotalChildren", "NumberChildrenAtHome", 243 | "HouseOwnerFlag", "NumberCarsOwned", 244 | "BikeBuyer", "YearlyIncome", "Age")]; 245 | # create a distance matrix from the data 246 | ds <- dist(TM50, method = "euclidean") ; 247 | # Hierarchical clustering model 248 | TMCL <- hclust(ds, method="ward.D2"); 249 | # Display the dendrogram 250 | plot(TMCL, xlab = NULL, ylab = NULL); 251 | # Cut tree into 2 clusters 252 | groups <- cutree(TMCL, k = 2); 253 | # Draw red borders around the 2 clusters 254 | rect.hclust(TMCL, k = 2, border = "red"); 255 | 256 | # K-Means clustering example in chapter 13 257 | 258 | 259 | # ---------------------------------------------------- 260 | # -- Section 3: LogReg, DTrees - Directed 261 | # ---------------------------------------------------- 262 | 263 | # In case it is needed - re-read the TM data 264 | TM = read.table("C:\\SQL2016DevGuide\\Chapter14_TM.csv", 265 | sep=",", header=TRUE, 266 | stringsAsFactors = TRUE); 267 | 268 | # Education is ordered 269 | TM$Education = factor(TM$Education, order=TRUE, 270 | levels=c("Partial High School", 271 | "High School","Partial College", 272 | "Bachelors", "Graduate Degree")); 273 | 274 | # Giving labels to BikeBuyer values 275 | TM$BikeBuyer <- factor(TM$BikeBuyer, 276 | levels = c(0,1), 277 | labels = c("No","Yes")); 278 | 279 | 280 | # Preparing the training and test sets 281 | 282 | # Setting the seed to make the split reproducible 283 | set.seed(1234); 284 | # Split the data set 285 | train <- sample(nrow(TM), 0.7 * nrow(TM)); 286 | TM.train <- TM[train,]; 287 | TM.test <- TM[-train,]; 288 | # Checking the split 289 | table(TM.train$BikeBuyer); 290 | table(TM.test$BikeBuyer); 291 | 292 | # Logistic regression from the base installation 293 | # Three input variables only 294 | TMLogR <- glm(BikeBuyer ~ 295 | YearlyIncome + Age + NumberCarsOwned, 296 | data=TM.train, family=binomial()); 297 | 298 | # Test the model 299 | probLR <- predict(TMLogR, TM.test, type = "response"); 300 | predLR <- factor(probLR > 0.5, 301 | levels = c(FALSE, TRUE), 302 | labels = c("No","Yes")); 303 | perfLR <- table(TM.test$BikeBuyer, predLR, 304 | dnn = c("Actual", "Predicted")); 305 | perfLR; 306 | # Not good 307 | 308 | # Logistic regression from the base installation 309 | # All input variables 310 | TMLogR <- glm(BikeBuyer ~ 311 | MaritalStatus + Gender + 312 
| TotalChildren + NumberChildrenAtHome + 313 | Education + Occupation + 314 | HouseOwnerFlag + NumberCarsOwned + 315 | CommuteDistance + Region + 316 | YearlyIncome + Age, 317 | data=TM.train, family=binomial()); 318 | 319 | # Test the model 320 | probLR <- predict(TMLogR, TM.test, type = "response"); 321 | predLR <- factor(probLR > 0.5, 322 | levels = c(FALSE, TRUE), 323 | labels = c("No","Yes")); 324 | perfLR <- table(TM.test$BikeBuyer, predLR, 325 | dnn = c("Actual", "Predicted")); 326 | perfLR; 327 | # Slightly better 328 | 329 | # Manually define other factors 330 | TM$TotalChildren = factor(TM$TotalChildren, order=TRUE); 331 | TM$NumberChildrenAtHome = factor(TM$NumberChildrenAtHome, order=TRUE); 332 | TM$NumberCarsOwned = factor(TM$NumberCarsOwned, order=TRUE); 333 | TM$HouseOwnerFlag = factor(TM$HouseOwnerFlag, order=TRUE); 334 | 335 | # Repeating the split 336 | # Setting the seed to make the split reproducible 337 | set.seed(1234); 338 | # Split the data set 339 | train <- sample(nrow(TM), 0.7 * nrow(TM)); 340 | TM.train <- TM[train,]; 341 | TM.test <- TM[-train,]; 342 | # Checking the split 343 | table(TM.train$BikeBuyer); 344 | table(TM.test$BikeBuyer); 345 | 346 | # Logistic regression from the base installation 347 | # All input variables, factors defined manually 348 | TMLogR <- glm(BikeBuyer ~ 349 | MaritalStatus + Gender + 350 | TotalChildren + NumberChildrenAtHome + 351 | Education + Occupation + 352 | HouseOwnerFlag + NumberCarsOwned + 353 | CommuteDistance + Region + 354 | YearlyIncome + Age, 355 | data=TM.train, family=binomial()); 356 | 357 | # Test the model 358 | probLR <- predict(TMLogR, TM.test, type = "response"); 359 | predLR <- factor(probLR > 0.5, 360 | levels = c(FALSE, TRUE), 361 | labels = c("No","Yes")); 362 | perfLR <- table(TM.test$BikeBuyer, predLR, 363 | dnn = c("Actual", "Predicted")); 364 | perfLR; 365 | # Again, slightly better 366 | 367 | 368 | # Decision trees with the rpart package (shipped with R, but it must be loaded explicitly) 369 | library(rpart); TMDTree <- rpart(BikeBuyer ~ MaritalStatus + Gender + 370 | TotalChildren + NumberChildrenAtHome + 371 | Education + Occupation + 372 | HouseOwnerFlag + NumberCarsOwned + 373 | CommuteDistance + Region + 374 | YearlyIncome + Age, 375 | method="class", data=TM.train); 376 | 377 | # Plot the tree 378 | install.packages("rpart.plot"); 379 | library(rpart.plot); 380 | prp(TMDTree, type = 2, extra = 104, fallen.leaves = FALSE); 381 | 382 | # Predictions on the test data set 383 | predDT <- predict(TMDTree, TM.test, type = "class"); 384 | perfDT <- table(TM.test$BikeBuyer, predDT, 385 | dnn = c("Actual", "Predicted")); 386 | perfDT; 387 | # Somewhat better 388 | 389 | # Package party (Decision Trees) 390 | install.packages("party", dependencies = TRUE); 391 | library("party"); 392 | 393 | # Train the model with defaults 394 | TMDT <- ctree(BikeBuyer ~ MaritalStatus + Gender + 395 | TotalChildren + NumberChildrenAtHome + 396 | Education + Occupation + 397 | HouseOwnerFlag + NumberCarsOwned + 398 | CommuteDistance + Region + 399 | YearlyIncome + Age, 400 | data=TM.train); 401 | 402 | # Predictions 403 | predDT <- predict(TMDT, TM.test, type = "response"); 404 | perfDT <- table(TM.test$BikeBuyer, predDT, 405 | dnn = c("Actual", "Predicted")); 406 | perfDT; 407 | # Much better 408 | 409 | # Train the model with more splits forced 410 | TMDT <- ctree(BikeBuyer ~ MaritalStatus + Gender + 411 | TotalChildren + NumberChildrenAtHome + 412 | Education + Occupation + 413 | HouseOwnerFlag + NumberCarsOwned + 414 | CommuteDistance + Region + 415 | YearlyIncome + Age, 416 | 
data=TM.train, 417 | controls = ctree_control(mincriterion = 0.70)); 418 | 419 | # Predictions 420 | predDT <- predict(TMDT, TM.test, type = "response"); 421 | perfDT <- table(TM.test$BikeBuyer, predDT, 422 | dnn = c("Actual", "Predicted")); 423 | perfDT; 424 | # Even better 425 | 426 | 427 | # ---------------------------------------------------- 428 | # -- Section 4: GGPlot 429 | # ---------------------------------------------------- 430 | 431 | install.packages("ggplot2"); 432 | library("ggplot2"); 433 | 434 | # Plots with count (number) of Education by Region 435 | ggplot (TM, aes(Region, fill=Education)) + 436 | geom_bar(position = "stack"); 437 | 438 | ggplot (TM, aes(Region, fill=Education)) + 439 | geom_bar(position="fill"); 440 | 441 | ggplot (TM, aes(Region, fill=Education)) + 442 | geom_bar(position="dodge"); 443 | 444 | 445 | # A smaller data frame for the purpose of the graph 446 | TMLM <- TM[1:100, c("YearlyIncome", "Age")]; 447 | 448 | # Plot the data points 449 | plot(TMLM$Age, TMLM$YearlyIncome, 450 | cex = 2, col = "orange", lwd = 2); 451 | # Plots with ggplot 452 | # Basic 453 | ggplot(data = TMLM, aes(x=Age, y=YearlyIncome)) + 454 | geom_point(); 455 | 456 | # Plot with a Lowess line 457 | plot(TMLM$Age, TMLM$YearlyIncome, 458 | cex = 2, col = "orange", lwd = 2); 459 | lines(lowess(TMLM$Age, TMLM$YearlyIncome), 460 | col = "blue", lwd = 2); 461 | 462 | # With ggplot - linear + loess 463 | ggplot(data = TMLM, aes(x=Age, y=YearlyIncome)) + 464 | geom_point() + 465 | geom_smooth(method = "lm", color = "red") + 466 | geom_smooth(color = "blue"); 467 | 468 | 469 | # Boxplot 470 | boxplot(TM$YearlyIncome ~ TM$Education, 471 | main = "Yearly Income in Groups", 472 | notch = TRUE, 473 | varwidth = TRUE, 474 | col = "orange", 475 | ylab = "Yearly Income", 476 | xlab = "Education"); 477 | 478 | # Boxplot with ggplot 479 | ggplot(TM, aes (x = Education, y = YearlyIncome)) + 480 | geom_boxplot(fill = "orange", 481 | color = "blue", notch = TRUE); 482 | 483 | # Boxplot and violin plot with ggplot 484 | ggplot(TM, aes (x = Education, y = YearlyIncome)) + 485 | geom_violin(fill = "lightgreen") + 486 | geom_boxplot(fill = "orange", 487 | width = 0.2); 488 | 489 | # Density plot 490 | ggplot(TM, aes(x = YearlyIncome, fill = Education)) + 491 | geom_density(alpha = 0.3); 492 | 493 | # Trellis charts 494 | ggplot(TM, aes(x = NumberCarsOwned, fill = Region)) + 495 | geom_bar(stat = "bin") + 496 | facet_grid(MaritalStatus ~ BikeBuyer) + 497 | theme(text = element_text(size=30)); 498 | 499 | # More examples 500 | ggplot(TM, aes(NumberCarsOwned) ) + 501 | geom_histogram() + 502 | facet_grid(MaritalStatus ~ .); 503 | 504 | ggplot(TM, aes(x = Education,y = BikeBuyer, fill = Region)) + 505 | geom_bar(stat = "identity") + 506 | facet_grid(. 
~ Region) + 507 | theme(legend.position="none",axis.text.x=element_text(angle=45)); 508 | 509 | # End of script 510 | 511 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Packt 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # SQL Server 2016 Developer's Guide 5 | This is the code repository for [SQL Server 2016 Developer's Guide](https://www.packtpub.com/big-data-and-business-intelligence/sql-server-2016-developer-guide?utm_source=github&utm_medium=repository&utm_campaign=9781786465344), published by [Packt](https://www.packtpub.com/?utm_source=github). It contains all the supporting project files necessary to work through the book from start to finish. 6 | ## About the Book 7 | Microsoft SQL Server 2016 is considered the biggest leap in Microsoft's data platform history, coming in the ongoing era of Big Data and data science. Compared to its predecessors, SQL Server 2016 offers developers a unique opportunity to leverage its advanced features and build applications that are robust, scalable, and easy to administer. 8 | ## Instructions and Navigation 9 | All of the code is organized into folders. Each folder starts with a number followed by the application name. For example, Chapter02. 10 | 11 | 12 | 13 | The code will look like the following: 14 | ``` 15 | EXEC dbo.InsertSimpleOrder 16 | @OrderId = 5, @OrderDate = '20160702', @Customer = N'CustA'; 17 | EXEC dbo.InsertSimpleOrderDetail 18 | @OrderId = 5, @ProductId = 1, @Quantity = 50; 19 | ``` 20 | 21 | In order to run all of the demo code in this book, you will need SQL Server 2016 Developer 22 | or Enterprise Edition. In addition, you will extensively use SQL Server Management Studio. 23 | You will also need the RStudio IDE and/or SQL Server Data Tools with the R Tools for Visual 24 | Studio plug-in.
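The R scripts in Chapter13 and Chapter14 additionally assume that in-database R integration (SQL Server R Services) is available, because they use RevoScaleR functions such as `RxInSqlServer`, `rxImport`, and `rxLogit` against the `AdventureWorksDW2014` sample database. The following minimal T-SQL sanity check is not part of the book's code files; it is a sketch that assumes a default SQL Server 2016 instance with the R Services feature installed and the SQL Server Launchpad service running:

```
-- Enable external script execution (a SQL Server service restart is required afterwards)
EXEC sp_configure 'external scripts enabled', 1;
RECONFIGURE;

-- Verify that R scripts execute in-database
EXEC sys.sp_execute_external_script
    @language = N'R',
    @script = N'OutputDataSet <- InputDataSet;',
    @input_data_1 = N'SELECT 1 AS SanityCheck;'
WITH RESULT SETS ((SanityCheck INT));
```

If the last statement returns a single row with the value 1, the instance is ready for the in-database examples; the remaining prerequisites (RStudio or R Tools for Visual Studio) only affect the client side.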
25 | 26 | ## Related Products 27 | * [SQL Server Analysis Services 2012 Cube Development Cookbook](https://www.packtpub.com/big-data-and-business-intelligence/sql-server-analysis-services-2012-cube-development-cookbook?utm_source=github&utm_medium=repository&utm_campaign=9781849689809) 28 | 29 | * [SQL Server 2014 Development Essentials](https://www.packtpub.com/networking-and-servers/sql-server-2014-development-essentials?utm_source=github&utm_medium=repository&utm_campaign=9781782172550) 30 | 31 | * [Getting Started with SQL Server 2012 Cube Development](https://www.packtpub.com/big-data-and-business-intelligence/getting-started-sql-server-2012-cube-development?utm_source=github&utm_medium=repository&utm_campaign=9781849689502) 32 | 33 | ### Suggestions and Feedback 34 | [Click here](https://docs.google.com/forms/d/e/1FAIpQLSe5qwunkGf6PUvzPirPDtuy1Du5Rlzew23UBp2S-P3wB-GcwQ/viewform) if you have any feedback or suggestions. 35 | ### Download a free PDF 36 | 37 | If you have already purchased a print or Kindle version of this book, you can get a DRM-free PDF version at no cost.
Simply click on the link to claim your free PDF.
38 | https://packt.link/free-ebook/9781786465344
--------------------------------------------------------------------------------