├── .github └── FUNDING.yml ├── .gitignore ├── ADF.procfwk - The 300! Scaled Out Worker Pipeline Test ├── Create 300 Workers.ps1 ├── Function Metrics.png ├── Portal Screen Shot 1.png ├── Portal Screen Shot 2.png ├── Portal Screen Shot 3.png ├── Procfwk 300 Image.png ├── Procfwk PowerBI 300 Same Waits.png ├── Procfwk PowerBI 300.png ├── Query Current Execution.png ├── Query Current Execution.sql ├── TemplatePipeline.json └── Update Metadata.sql ├── About └── Paul Andrew CV.pdf ├── Azure Data Factory - Pipeline Hierarchies (Generation Control) ├── Mock Up Pipelines.pptx ├── Summary.pdf └── adf-gpc-pipelines.png ├── Azure Synapse Analytics - Feature Summary vs Status ├── Synapse Summary - 10-02-2021.pdf ├── Synapse Summary - 10-02-2021.png ├── Synapse Summary - 28-05-2021.pdf ├── Synapse Summary - 28-05-2021.png └── Synapse Summary.pptx ├── Best Practices for Implementing Azure Data Factory - Auto Checker Script v0.1 ├── ADF Checker Output v0.1.png └── ADF Checker Script v0.1.ps1 ├── Best Practices for Implementing Azure Data Factory └── Security Custom Roles │ ├── ADFPipelineExecutor.json │ └── ADFReader.json ├── Building a Data Mesh Architecture in Azure ├── Beard Icon.jpg ├── Data Domains View 1.png ├── Data Domains View 2.png ├── Data Mesh Roadmap Starting Point.pdf ├── Data Mesh Roadmap.png ├── Data Mesh Role Profiles.png ├── Data Mesh with Infrastructure Wrappers.png ├── Data Model Output Blot On.png ├── Data Product Awards.png ├── Data Product Venn Definition.png ├── Data Product Venn Starting Point.png ├── Diagrams.vsdx ├── Edges Primary.png ├── Edges Secondary.png ├── Edges Tertiary Op1.png ├── Edges Tertiary Op2.png ├── Edges Tertiary Op3.png ├── Edges Tertiary.png ├── Email to David Vellante.png ├── Feature Image P1.png ├── Feature Image P10.png ├── Feature Image P11.png ├── Feature Image P12.png ├── Feature Image P2.png ├── Feature Image P3.png ├── Feature Image P4.png ├── Feature Image P5.png ├── Feature Image P6.png ├── Feature Image P7.png ├── Feature 
Image P8.png ├── Feature Image P9.png ├── Generic Platform to Product Approach.png ├── Message to Zhamak.png ├── Multi Plane Components.png ├── Node with Edges.png ├── Nodes as Resource Groups.png ├── Nodes as Subscriptions.png ├── Nodes with Edges.png ├── Nodes with Limited Edges.png ├── Not Data Product Venn.png ├── Platform Wrappers Infrastructure.png ├── Platform Wrappers Initial.png ├── Practice with SaaS Plane.png ├── Practice with Side IaaS Paas Planes.png ├── Practice with Side IaaS Paas SaaS Planes.png ├── Practice.png ├── SaaS Plane.png ├── Slides.pptx ├── Theory - Planes.png ├── Theory vs Practice Slide.png ├── Theory.png └── Zhamak Dehghani.jpg ├── Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines ├── Data Factory │ └── ARM ADF procfwk v1.json ├── Images │ ├── ADF procfwk Whiteboard.jpg │ ├── ADFprocfwk Graphic.png │ ├── All ADF Activities - With Hierarchy.png │ ├── All ADF Activities.png │ ├── Create Database.png │ ├── Create Schema.png │ ├── Framework Bootstrap Inner ForEach.png │ ├── Framework Bootstrap Pipeline.png │ ├── Framework Stage Executor Inner ForEach.png │ ├── Framework Stage Executor.png │ ├── Function Activity Settings.png │ ├── Get Pipeline Parameters SQL Snippet.png │ ├── Grandparent Pipeline.png │ ├── Last Execution Query.png │ ├── Proc Preview.png │ ├── Sample Metadata.png │ ├── Wait Example.png │ └── prcfwk database diagram.png ├── SQLDB │ ├── Database and Schema │ │ ├── Create Database.sql │ │ └── Create Schema.sql │ ├── Procedures │ │ ├── procfwk.CreateNewExecution.StoredProcedure.sql │ │ ├── procfwk.GetPipelineParameters.StoredProcedure.sql │ │ ├── procfwk.GetPipelinesInStage.StoredProcedure.sql │ │ ├── procfwk.GetProcessStages.StoredProcedure.sql │ │ ├── procfwk.SetLogPipelineFailed.StoredProcedure.sql │ │ ├── procfwk.SetLogPipelineRunning.StoredProcedure.sql │ │ ├── procfwk.SetLogPipelineSuccess.StoredProcedure.sql │ │ ├── procfwk.SetLogStageStart.StoredProcedure.sql │ │ └── 
procfwk.UpdateExecutionLog.StoredProcedure.sql │ ├── Sample Data │ │ ├── Data.sql │ │ └── Select Queries.sql │ └── Tables │ │ ├── procfwk.CurrentExecution.Table.sql │ │ ├── procfwk.ExecutionLog.Table.sql │ │ ├── procfwk.PipelineParameters.Table.sql │ │ ├── procfwk.PipelineProcesses.Table.sql │ │ └── procfwk.ProcessingStageDetails.Table.sql └── Select Last Execution.sql ├── Creating an Azure Data Factory v2 Custom Activity ├── ADF Custom Activity.png ├── Batch Pool Auto Scale Formula.txt ├── Example CSV Data to Clean.csv ├── Example Custom Logger Class.cs ├── Local Reference Objects.cs ├── Parse Reference Objects.cs └── Upload Console App To Blob Store.ps1 ├── Execute Any Azure Data Factory Pipeline with an Azure Function ├── .vs │ └── PipelineExecutor │ │ ├── DesignTimeBuild │ │ └── .dtbcache │ │ └── v16 │ │ ├── .suo │ │ └── Server │ │ └── sqlite3 │ │ ├── db.lock │ │ └── storage.ide ├── PipelineExecutor.sln ├── PipelineExecutor │ ├── .gitignore │ ├── ExecutePipeline.cs │ ├── PipelineExecutor.csproj │ ├── Properties │ │ └── PublishProfiles │ │ │ └── PipelineExecutor - Zip Deploy.pubxml │ └── host.json └── Visual.pptx ├── First Time Playing with Spark.Net on Azure Synapse Analytics ├── Images │ ├── Synapse Compute Pools.png │ ├── Synapse Home.png │ ├── Synapse Notebook Attached and Language.png │ ├── Synapse Spark.Net Notebook.png │ ├── Synapse Storage Explorer.png │ ├── Synapse Storage New Notebook Result.png │ ├── Synapse Storage New Notebook.png │ ├── Synaspe Pipeline.png │ └── Visual Studio SparkNet NuGet.png └── PaulsNotebook.ipynb ├── Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions └── Get Error Details │ ├── .vs │ └── Get Error Details │ │ ├── DesignTimeBuild │ │ └── .dtbcache.v2 │ │ └── v16 │ │ └── .suo │ ├── Get Error Details.sln │ └── Get Error Details │ ├── .gitignore │ ├── Get Error Details.csproj │ ├── GetActivityErrorDetails.cs │ ├── JavaScriptSerializer.cs │ └── host.json ├── Get Any Azure Data Factory Pipeline Run 
Status with Azure Functions ├── Filter Factory Code Snippet.png ├── PipelineStatusChecker │ ├── .vs │ │ └── PipelineStatusChecker │ │ │ ├── DesignTimeBuild │ │ │ ├── .dtbcache │ │ │ └── .dtbcache.v2 │ │ │ ├── v15 │ │ │ ├── .suo │ │ │ └── Server │ │ │ │ └── sqlite3 │ │ │ │ ├── db.lock │ │ │ │ ├── storage.ide │ │ │ │ ├── storage.ide-shm │ │ │ │ └── storage.ide-wal │ │ │ └── v16 │ │ │ ├── .suo │ │ │ └── Server │ │ │ └── sqlite3 │ │ │ ├── db.lock │ │ │ └── storage.ide │ ├── PipelineStatusChecker.sln │ └── PipelineStatusChecker │ │ ├── .gitignore │ │ ├── Functions.cs │ │ ├── PipelineStatusChecker.csproj │ │ ├── Properties │ │ └── PublishProfiles │ │ │ └── PipelineStatusChecker - Zip Deploy.pubxml │ │ └── host.json └── Visual.pptx ├── Get Data Factory to Check Itself for a Running Pipeline via the Azure Management API ├── Pipeline.json └── Social Media Image.png ├── How To Use 'Specify dynamic contents in JSON format' in Azure Data Factory Linked Services └── Dynamic Key Vault Linked Service.json ├── Idea for Self Service Using Azure Synapse Analytics ├── Data Consumption Dashboards.pbix ├── Images.vsdx ├── Notebook.ipynb └── pipeline.json ├── Interacting with SQL Server 2019 Big Data Clusters ├── Cluster Admin.png ├── Data Studio - Master Instance.png ├── Data Studio - Notebooks.png ├── Grafana Node Dashboard.png ├── Grafana SQL Dashboard.png ├── Icon.png ├── Kubernetes Pods.png ├── SSMS.png └── Spark Server Logs.png ├── PowerShell Export Databricks Workspace Items - Recurse └── Export Databricks Workspace Items Recurse.ps1 ├── README.md ├── Scaling Azure Data Integration Pipelines With Regional Data Extraction And Central Processing ├── Data Pipelines Scaled.pdf └── Data Pipelines Scaled.png ├── Structuring Your Databricks Notebooks with Markdown, Titles, Widgets and Comments ├── Databricks Icon.png ├── Notebook Example.png ├── Notebook Example.scala └── Notebook Structure Example.html ├── Summarise my Azure Data Factory ARM Template Using T-SQL ├── Create Table.sql 
└── Parse JSON.sql ├── The Microsoft Inteligent Data Platform ├── Feature Picture.png ├── Icon Timeline.png ├── Learning Synapse ER Feature Image.png └── Time Line.pptx ├── Thinking About an Azure Synapse Analytics Physical Architecture ├── ASA Physical Architecture v1.svg └── Azure Synapse Physical Architecture v1.jpg ├── Trying to Deploy Azure Synapse Analytics Using ARM Templates ├── Synapse ARM Template Bug.png ├── Synapse Artifacts.png ├── Synapse Git Config.png ├── Synapse Release.png └── Vanilla Workspace Template.json └── Using Data Factory Parameterised Linked Services ├── DSWithParams.png ├── Drawing1.vsdx ├── Drop All Target DB FKs.sql ├── GenericSQLDB.json ├── GenericSQLTable.json ├── Get Tables Query.sql ├── Linked Service Param Flow.png ├── LinkedServiceParams.png ├── Metadata Driven Pipeline.json ├── Metadata Driven Pipeline.png ├── Pipeline Level Params.png ├── Pipeline at Runtime.png ├── Scenario Image.png ├── Simple Pipeline.json └── Stored Proc Settings.png /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [mrpaulandrew] 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | Building a Data Mesh Architecture in Azure/~$Slides.pptx 3 | -------------------------------------------------------------------------------- /ADF.procfwk - The 300! 
Scaled Out Worker Pipeline Test/Create 300 Workers.ps1: -------------------------------------------------------------------------------- 1 | # Set global variables as required: 2 | $resourceGroupName = "ADF.procfwk" 3 | $dataFactoryName = "WorkersFactory" 4 | 5 | #SPN for deploying ADF: 6 | $tenantId = [System.Environment]::GetEnvironmentVariable('AZURE_TENANT_ID') 7 | $spId = [System.Environment]::GetEnvironmentVariable('AZURE_CLIENT_ID') 8 | $spKey = [System.Environment]::GetEnvironmentVariable('AZURE_CLIENT_SECRET') 9 | 10 | #Modules 11 | Import-Module -Name "Az" 12 | #Update-Module -Name "Az" 13 | 14 | Import-Module -Name "Az.DataFactory" 15 | #Update-Module -Name "Az.DataFactory" 16 | 17 | # Login as a Service Principal 18 | $passwd = ConvertTo-SecureString $spKey -AsPlainText -Force 19 | $pscredential = New-Object System.Management.Automation.PSCredential($spId, $passwd) 20 | Connect-AzAccount -ServicePrincipal -Credential $pscredential -TenantId $tenantId | Out-Null 21 | 22 | #Create array of 300 items 23 | $a = 1..300 24 | 25 | #Template pipeline 26 | $scriptPath = (Get-Item -Path ".\").FullName #+ "\Desktop\Temp\" 27 | $deploymentFilePath = $scriptPath + "\TemplatePipeline.json" 28 | $body = (Get-Content -Path $deploymentFilePath | Out-String) 29 | $json = $body | ConvertFrom-Json 30 | 31 | #Deploy pipelines 32 | foreach ($element in $a) { 33 | 34 | $pipelineName = "Wait " + $element.ToString() 35 | $json.name = $pipelineName 36 | 37 | Write-Host "Deploying pipeline... "$pipelineName 38 | 39 | New-AzResource ` 40 | -ResourceType 'Microsoft.DataFactory/factories/pipelines' ` 41 | -ResourceGroupName $resourceGroupName ` 42 | -Name "$dataFactoryName/$pipelineName" ` 43 | -ApiVersion "2018-06-01" ` 44 | -Properties $json ` 45 | -IsFullObject -Force | Out-Null 46 | } 47 | -------------------------------------------------------------------------------- /ADF.procfwk - The 300! 
Scaled Out Worker Pipeline Test/Function Metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Function Metrics.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Portal Screen Shot 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Portal Screen Shot 1.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Portal Screen Shot 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Portal Screen Shot 2.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Portal Screen Shot 3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Portal Screen Shot 3.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Procfwk 300 Image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! 
Scaled Out Worker Pipeline Test/Procfwk 300 Image.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Procfwk PowerBI 300 Same Waits.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Procfwk PowerBI 300 Same Waits.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Procfwk PowerBI 300.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Procfwk PowerBI 300.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Query Current Execution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Query Current Execution.png -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Query Current Execution.sql: -------------------------------------------------------------------------------- 1 | SELECT [StageId],[PipelineStatus],COUNT(0) FROM [procfwk].[CurrentExecution] WITH (READPAST) GROUP BY [StageId],[PipelineStatus] 2 | 3 | SELECT * FROM [procfwk].[CurrentExecution] WITH (NOLOCK) -------------------------------------------------------------------------------- /ADF.procfwk - The 300! 
Scaled Out Worker Pipeline Test/TemplatePipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "PipelineName", 3 | "properties": { 4 | "description": "Perform test worker.", 5 | "activities": [ 6 | { 7 | "name": "JustWait", 8 | "type": "Wait", 9 | "dependsOn": [], 10 | "userProperties": [], 11 | "typeProperties": { 12 | "waitTimeInSeconds": { 13 | "value": "@pipeline().parameters.WaitTime", 14 | "type": "Expression" 15 | } 16 | } 17 | } 18 | ], 19 | "parameters": { 20 | "WaitTime": { 21 | "type": "int", 22 | "defaultValue": 5 23 | } 24 | }, 25 | "folder": { 26 | "name": "_Workers" 27 | }, 28 | "annotations": [ 29 | "_Workers" 30 | ] 31 | } 32 | } -------------------------------------------------------------------------------- /ADF.procfwk - The 300! Scaled Out Worker Pipeline Test/Update Metadata.sql: -------------------------------------------------------------------------------- 1 | --update factory name 2 | UPDATE [procfwk].[DataFactorys] SET [DataFactoryName] = 'FrameworkFactoryTest' 3 | 4 | --clear the decks 5 | DELETE FROM [procfwk].[PipelineAlertLink]; 6 | DBCC CHECKIDENT ('[procfwk].[PipelineAlertLink]', RESEED, 0); 7 | 8 | DELETE FROM [procfwk].[Recipients]; 9 | DBCC CHECKIDENT ('[procfwk].[Recipients]', RESEED, 0); 10 | 11 | DELETE FROM [procfwk].[PipelineAuthLink]; 12 | DBCC CHECKIDENT ('[procfwk].[PipelineAuthLink]', RESEED, 0); 13 | 14 | DELETE FROM [dbo].[ServicePrincipals]; 15 | DBCC CHECKIDENT ('[dbo].[ServicePrincipals]', RESEED, 0); 16 | 17 | DELETE FROM [procfwk].[PipelineParameters]; 18 | DBCC CHECKIDENT ('[procfwk].[PipelineParameters]', RESEED, 0); 19 | 20 | DELETE FROM [procfwk].[Pipelines]; 21 | DBCC CHECKIDENT ('[procfwk].[Pipelines]', RESEED, 0); 22 | 23 | --get data factory id 24 | DECLARE @ADFId INT = (SELECT [DataFactoryId] FROM [procfwk].[DataFactorys] WHERE [DataFactoryName] = 'FrameworkFactoryTest') 25 | 26 | --insert 300 pipelines 27 | ;WITH cte AS 28 | ( 29 | SELECT TOP 300 
30 | ROW_NUMBER() OVER (ORDER BY s1.[object_id]) AS 'Number' 31 | FROM 32 | sys.all_columns AS s1 33 | CROSS JOIN sys.all_columns AS s2 34 | ) 35 | INSERT INTO [procfwk].[Pipelines] 36 | ( 37 | [DataFactoryId], 38 | [StageId], 39 | [PipelineName], 40 | [LogicalPredecessorId], 41 | [Enabled] 42 | ) 43 | SELECT 44 | @ADFId, 45 | CASE 46 | WHEN [Number] <= 100 THEN 1 47 | WHEN [Number] > 100 AND [Number] <= 200 THEN 2 48 | WHEN [Number] > 200 AND [Number] <= 300 THEN 3 49 | END, 50 | 'Wait ' + CAST([Number] AS VARCHAR), 51 | NULL, 52 | 1 53 | FROM 54 | cte; 55 | 56 | --disable other execution stages 57 | UPDATE [procfwk].[Stages] SET [Enabled] = 0 WHERE [StageId] > 3; 58 | 59 | --insert 300 pipeline parameters 60 | INSERT INTO [procfwk].[PipelineParameters] 61 | ( 62 | [PipelineId], 63 | [ParameterName], 64 | [ParameterValue] 65 | ) 66 | SELECT 67 | [PipelineId], 68 | 'WaitTime', 69 | LEFT(ABS(CAST(CAST(NEWID() AS VARBINARY) AS INT)),2) 70 | FROM 71 | [procfwk].[Pipelines]; 72 | 73 | --add SPN (SQLCMD mode) 74 | EXEC [procfwk].[AddServicePrincipal] 75 | @DataFactory = N'FrameworkFactoryTest', 76 | @PrincipalId = '$(AZURE_CLIENT_ID)', 77 | @PrincipalSecret = '$(AZURE_CLIENT_SECRET)', 78 | @PrincipalName = '$(AZURE_CLIENT_NAME)' 79 | 80 | -------------------------------------------------------------------------------- /About/Paul Andrew CV.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/About/Paul Andrew CV.pdf -------------------------------------------------------------------------------- /Azure Data Factory - Pipeline Hierarchies (Generation Control)/Mock Up Pipelines.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Data Factory - Pipeline Hierarchies (Generation 
Control)/Mock Up Pipelines.pptx -------------------------------------------------------------------------------- /Azure Data Factory - Pipeline Hierarchies (Generation Control)/Summary.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Data Factory - Pipeline Hierarchies (Generation Control)/Summary.pdf -------------------------------------------------------------------------------- /Azure Data Factory - Pipeline Hierarchies (Generation Control)/adf-gpc-pipelines.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Data Factory - Pipeline Hierarchies (Generation Control)/adf-gpc-pipelines.png -------------------------------------------------------------------------------- /Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 10-02-2021.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 10-02-2021.pdf -------------------------------------------------------------------------------- /Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 10-02-2021.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 10-02-2021.png -------------------------------------------------------------------------------- /Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 28-05-2021.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 28-05-2021.pdf -------------------------------------------------------------------------------- /Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 28-05-2021.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary - 28-05-2021.png -------------------------------------------------------------------------------- /Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Azure Synapse Analytics - Feature Summary vs Status/Synapse Summary.pptx -------------------------------------------------------------------------------- /Best Practices for Implementing Azure Data Factory - Auto Checker Script v0.1/ADF Checker Output v0.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Best Practices for Implementing Azure Data Factory - Auto Checker Script v0.1/ADF Checker Output v0.1.png -------------------------------------------------------------------------------- /Best Practices for Implementing Azure Data Factory/Security Custom Roles/ADFPipelineExecutor.json: -------------------------------------------------------------------------------- 1 | { 2 | "Name": "ADF Pipeline Executor", 3 | "IsCustom": true, 4 | "Description": "Can start an ADF Pipeline 
Run", 5 | "Actions": [ 6 | "Microsoft.DataFactory/factories/pipelines/createrun/action", 7 | "*/read" 8 | ], 9 | "NotActions": [], 10 | "AssignableScopes": [ 11 | "/subscriptions/" 12 | ] 13 | } -------------------------------------------------------------------------------- /Best Practices for Implementing Azure Data Factory/Security Custom Roles/ADFReader.json: -------------------------------------------------------------------------------- 1 | { 2 | "Name": "ADF Reader", 3 | "IsCustom": true, 4 | "Description": "Can read all Data Factory resources", 5 | "Actions": [ 6 | "Microsoft.DataFactory/operations/read", 7 | "Microsoft.DataFactory/checkazuredatafactorynameavailability/read", 8 | "Microsoft.DataFactory/locations/getFeatureValue/read", 9 | "Microsoft.DataFactory/factories/triggers/triggerruns/read", 10 | "Microsoft.DataFactory/factories/triggers/read", 11 | "Microsoft.DataFactory/factories/pipelines/pipelineruns/activityruns/progress/read", 12 | "Microsoft.DataFactory/factories/pipelines/pipelineruns/read", 13 | "Microsoft.DataFactory/factories/pipelines/read", 14 | "Microsoft.DataFactory/factories/privateEndpointConnectionProxies/operationstatuses/read", 15 | "Microsoft.DataFactory/factories/privateEndpointConnectionProxies/operationresults/read", 16 | "Microsoft.DataFactory/factories/privateEndpointConnectionProxies/read", 17 | "Microsoft.DataFactory/factories/linkedServices/read", 18 | "Microsoft.DataFactory/factories/integrationruntimes/nodes/ipAddress/action", 19 | "Microsoft.DataFactory/factories/integrationruntimes/nodes/read", 20 | "Microsoft.DataFactory/factories/integrationruntimes/monitoringdata/read", 21 | "Microsoft.DataFactory/factories/integrationruntimes/getstatus/read", 22 | "Microsoft.DataFactory/factories/integrationruntimes/getconnectioninfo/action", 23 | "Microsoft.DataFactory/factories/integrationruntimes/read", 24 | "Microsoft.DataFactory/factories/sandboxpipelineruns/sandboxActivityRuns/read", 25 | 
"Microsoft.DataFactory/factories/sandboxpipelineruns/read", 26 | "Microsoft.DataFactory/factories/datasets/read", 27 | "Microsoft.DataFactory/factories/dataflows/read", 28 | "Microsoft.DataFactory/factories/triggerruns/read", 29 | "Microsoft.DataFactory/factories/querytriggerruns/read", 30 | "Microsoft.DataFactory/factories/querypipelineruns/read", 31 | "Microsoft.DataFactory/factories/pipelineruns/queryactivityruns/read", 32 | "Microsoft.DataFactory/factories/pipelineruns/activityruns/read", 33 | "Microsoft.DataFactory/factories/pipelineruns/read", 34 | "Microsoft.DataFactory/factories/operationResults/read", 35 | "Microsoft.DataFactory/factories/getFeatureValue/read", 36 | "Microsoft.DataFactory/factories/getDataPlaneAccess/read", 37 | "Microsoft.DataFactory/factories/read", 38 | "Microsoft.DataFactory/datafactories/runs/loginfo/read", 39 | "Microsoft.DataFactory/datafactories/datapipelines/activitywindows/read", 40 | "Microsoft.DataFactory/datafactories/datapipelines/activities/activitywindows/read", 41 | "Microsoft.DataFactory/datafactories/datapipelines/read", 42 | "Microsoft.DataFactory/datafactories/linkedServices/read", 43 | "Microsoft.DataFactory/datafactories/gateways/connectioninfo/action", 44 | "Microsoft.DataFactory/datafactories/gateways/read", 45 | "Microsoft.DataFactory/datafactories/tables/read", 46 | "Microsoft.DataFactory/datafactories/datasets/slices/read", 47 | "Microsoft.DataFactory/datafactories/datasets/sliceruns/read", 48 | "Microsoft.DataFactory/datafactories/datasets/activitywindows/read", 49 | "Microsoft.DataFactory/datafactories/datasets/read", 50 | "Microsoft.DataFactory/datafactories/activitywindows/read", 51 | "Microsoft.DataFactory/datafactories/read" 52 | ], 53 | "NotActions": [], 54 | "AssignableScopes": [ 55 | "/subscriptions/" 56 | ] 57 | } -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Beard Icon.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Beard Icon.jpg -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Domains View 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Domains View 1.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Domains View 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Domains View 2.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Mesh Roadmap Starting Point.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Mesh Roadmap Starting Point.pdf -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Mesh Roadmap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Mesh Roadmap.png -------------------------------------------------------------------------------- /Building a 
Data Mesh Architecture in Azure/Data Mesh Role Profiles.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Mesh Role Profiles.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Mesh with Infrastructure Wrappers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Mesh with Infrastructure Wrappers.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Model Output Blot On.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Model Output Blot On.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Product Awards.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Product Awards.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Product Venn Definition.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data 
Product Venn Definition.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Data Product Venn Starting Point.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Data Product Venn Starting Point.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Diagrams.vsdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Diagrams.vsdx -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Edges Primary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Edges Primary.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Edges Secondary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Edges Secondary.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Edges Tertiary Op1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a 
Data Mesh Architecture in Azure/Edges Tertiary Op1.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Edges Tertiary Op2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Edges Tertiary Op2.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Edges Tertiary Op3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Edges Tertiary Op3.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Edges Tertiary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Edges Tertiary.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Email to David Vellante.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Email to David Vellante.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P1.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P10.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P11.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P12.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P2.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P3.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P3.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P4.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P5.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P6.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P7.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature 
Image P8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P8.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Feature Image P9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Feature Image P9.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Generic Platform to Product Approach.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Generic Platform to Product Approach.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Message to Zhamak.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Message to Zhamak.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Multi Plane Components.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Multi Plane Components.png 
-------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Node with Edges.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Node with Edges.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Nodes as Resource Groups.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Nodes as Resource Groups.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Nodes as Subscriptions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Nodes as Subscriptions.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Nodes with Edges.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Nodes with Edges.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Nodes with Limited Edges.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data 
Mesh Architecture in Azure/Nodes with Limited Edges.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Not Data Product Venn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Not Data Product Venn.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Platform Wrappers Infrastructure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Platform Wrappers Infrastructure.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Platform Wrappers Initial.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Platform Wrappers Initial.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Practice with SaaS Plane.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Practice with SaaS Plane.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Practice with Side IaaS Paas Planes.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Practice with Side IaaS Paas Planes.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Practice with Side IaaS Paas SaaS Planes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Practice with Side IaaS Paas SaaS Planes.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Practice.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Practice.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/SaaS Plane.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/SaaS Plane.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Slides.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Slides.pptx -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Theory - Planes.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Theory - Planes.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Theory vs Practice Slide.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Theory vs Practice Slide.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Theory.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Theory.png -------------------------------------------------------------------------------- /Building a Data Mesh Architecture in Azure/Zhamak Dehghani.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Building a Data Mesh Architecture in Azure/Zhamak Dehghani.jpg -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/ADF procfwk Whiteboard.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/ADF procfwk Whiteboard.jpg 
-------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/ADFprocfwk Graphic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/ADFprocfwk Graphic.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/All ADF Activities - With Hierarchy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/All ADF Activities - With Hierarchy.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/All ADF Activities.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/All ADF Activities.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Create Database.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Create Database.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Create Schema.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Create Schema.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Framework Bootstrap Inner ForEach.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Framework Bootstrap Inner ForEach.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Framework Bootstrap Pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Framework Bootstrap Pipeline.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure 
Data Factory Pipelines/Images/Framework Stage Executor Inner ForEach.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Framework Stage Executor Inner ForEach.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Framework Stage Executor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Framework Stage Executor.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Function Activity Settings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Function Activity Settings.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Get Pipeline Parameters SQL Snippet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory 
Pipelines/Images/Get Pipeline Parameters SQL Snippet.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Grandparent Pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Grandparent Pipeline.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Last Execution Query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Last Execution Query.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Proc Preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Proc Preview.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Sample Metadata.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Sample Metadata.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Wait Example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/Wait Example.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/prcfwk database diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Images/prcfwk database diagram.png -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Database and Schema/Create Database.sql: -------------------------------------------------------------------------------- 1 | CREATE DATABASE [MetaData] 2 | ( 3 | EDITION = 'Standard', 4 | SERVICE_OBJECTIVE = 'S2', 5 | MAXSIZE = 20 GB 6 | ) WITH CATALOG_COLLATION = SQL_Latin1_General_CP1_CI_AS; 7 | GO 8 | -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Database and Schema/Create Schema.sql: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Database and Schema/Create Schema.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.CreateNewExecution.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.CreateNewExecution.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.GetPipelineParameters.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.GetPipelineParameters.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.GetPipelinesInStage.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven 
Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.GetPipelinesInStage.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.GetProcessStages.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.GetProcessStages.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogPipelineFailed.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogPipelineFailed.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogPipelineRunning.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogPipelineRunning.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a 
Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogPipelineSuccess.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogPipelineSuccess.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogStageStart.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.SetLogStageStart.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.UpdateExecutionLog.StoredProcedure.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Procedures/procfwk.UpdateExecutionLog.StoredProcedure.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Sample Data/Data.sql: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Sample Data/Data.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Sample Data/Select Queries.sql: -------------------------------------------------------------------------------- 1 | SELECT * FROM [procfwk].[ProcessingStageDetails] 2 | SELECT * FROM [procfwk].[PipelineProcesses] 3 | SELECT * FROM [procfwk].[PipelineParameters] 4 | 5 | -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.CurrentExecution.Table.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.CurrentExecution.Table.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.ExecutionLog.Table.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.ExecutionLog.Table.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory 
Pipelines/SQLDB/Tables/procfwk.PipelineParameters.Table.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.PipelineParameters.Table.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.PipelineProcesses.Table.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.PipelineProcesses.Table.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.ProcessingStageDetails.Table.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/SQLDB/Tables/procfwk.ProcessingStageDetails.Table.sql -------------------------------------------------------------------------------- /Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines/Select Last Execution.sql: -------------------------------------------------------------------------------- 1 | WITH maxLog AS 2 | ( 3 | SELECT 4 | MAX([LogId]) AS 'MaxLogId' 5 | FROM 6 | [procfwk].[ExecutionLog] 7 | ), 8 | lastExecutionId AS 9 | ( 10 | SELECT 11 | [LocalExecutionId] 12 | 
FROM 13 | [procfwk].[ExecutionLog] el1 14 | INNER JOIN maxLog 15 | ON maxLog.[MaxLogId] = el1.[LogId] 16 | ) 17 | SELECT 18 | el2.[LogId], 19 | el2.[StageId], 20 | el2.[PipelineId], 21 | el2.[PipelineName], 22 | el2.[StartDateTime], 23 | el2.[PipelineStatus], 24 | el2.[EndDateTime] 25 | FROM 26 | [procfwk].[ExecutionLog] el2 27 | INNER JOIN lastExecutionId 28 | ON el2.[LocalExecutionId] = lastExecutionId.[LocalExecutionId] 29 | ORDER BY 30 | el2.[StageId], 31 | el2.[PipelineId] 32 | 33 | 34 | -------------------------------------------------------------------------------- /Creating an Azure Data Factory v2 Custom Activity/ADF Custom Activity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Creating an Azure Data Factory v2 Custom Activity/ADF Custom Activity.png -------------------------------------------------------------------------------- /Creating an Azure Data Factory v2 Custom Activity/Batch Pool Auto Scale Formula.txt: -------------------------------------------------------------------------------- 1 | startingNumberOfVMs = 1; maxNumberofVMs = 25; pendingTaskSamplePercent = $PendingTasks.GetSamplePercent(180 * TimeInterval_Second); pendingTaskSamples = pendingTaskSamplePercent < 70 ? 
startingNumberOfVMs : avg($PendingTasks.GetSample(180 * TimeInterval_Second)); $TargetDedicatedNodes=min(maxNumberofVMs, pendingTaskSamples); -------------------------------------------------------------------------------- /Creating an Azure Data Factory v2 Custom Activity/Example CSV Data to Clean.csv: -------------------------------------------------------------------------------- 1 | Field1,Fiel2,Field3,Field4 2 | "abc","abc","abc","abc" 3 | "abc","ab 4 | c","a 5 | bc","abc" 6 | "abc","abc","abc","abc" 7 | 8 | -------------------------------------------------------------------------------- /Creating an Azure Data Factory v2 Custom Activity/Example Custom Logger Class.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Linq; 4 | using System.Text; 5 | using System.Threading.Tasks; 6 | 7 | namespace CustomActivityApp 8 | { 9 | public class CustomLogger 10 | { 11 | public void StdOut (string message) 12 | { 13 | string timeStamp = DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss"); 14 | string logLine = timeStamp + "|" + message; 15 | 16 | Console.WriteLine(logLine); 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /Creating an Azure Data Factory v2 Custom Activity/Local Reference Objects.cs: -------------------------------------------------------------------------------- 1 | //batch variables 2 | //https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables 3 | 4 | string workingDir = Environment.GetEnvironmentVariable("AZ_BATCH_TASK_WORKING_DIR"); 5 | string nodeSharedDir = Environment.GetEnvironmentVariable("AZ_BATCH_NODE_SHARED_DIR"); 6 | 7 | //local paths 8 | string path = Directory.GetCurrentDirectory(); 9 | 10 | //for local debugging: 11 | if (workingDir == null) 12 | { 13 | workingDir = Path.GetFullPath(Path.Combine(path, @"..\..\")) + "ReferenceObjects"; 14 | } 15 | if 
(nodeSharedDir == null) 16 | { 17 | nodeSharedDir = Path.GetFullPath(Path.Combine(path, @"..\..\")) + "Shared"; 18 | } -------------------------------------------------------------------------------- /Creating an Azure Data Factory v2 Custom Activity/Parse Reference Objects.cs: -------------------------------------------------------------------------------- 1 | if (File.Exists(workingDir + "\\" + linkedServiceFile)) 2 | { 3 | linkedServices = JsonConvert.DeserializeObject(File.ReadAllText(workingDir + "\\" + linkedServiceFile)); 4 | 5 | int links = linkedServices.Count; 6 | for (int i = 0; i < links; i++) 7 | { 8 | if (linkedServices[i].properties.type.ToString() == "AzureDataLakeStore" && keyType == null) 9 | { 10 | dataLakeStoreUri = linkedServices[i].properties.typeProperties.dataLakeStoreUri.ToString(); 11 | servicePrincipalId = linkedServices[i].properties.typeProperties.servicePrincipalId.ToString(); 12 | servicePrincipalKey = linkedServices[i].properties.typeProperties.servicePrincipalKey.ToString(); 13 | tenantId = linkedServices[i].properties.typeProperties.tenant.ToString(); 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /Creating an Azure Data Factory v2 Custom Activity/Upload Console App To Blob Store.ps1: -------------------------------------------------------------------------------- 1 |  2 | 3 | # Variables: 4 | $storageAccountName = "" 5 | $containerName = "" 6 | $storageAccountKey = "" 7 | 8 | # Find the local folder where this PowerShell script is stored. 
9 | $scriptPath = $PSScriptRoot 10 | $projectFolder = (get-item $scriptPath ).parent.FullName + "\bin\Debug\" 11 | 12 | $files = Get-ChildItem $projectFolder | where { !$_.PSIsContainer -and $_ -notlike "*.json" } #not folders or JSON files 13 | 14 | #create blob context with key 15 | $blobContext = New-AzureStorageContext -StorageAccountName $storageAccountName -StorageAccountKey $storageAccountKey 16 | 17 | #remove current container contents 18 | Write-Host "Removing current container contents." 19 | 20 | Get-AzureStorageBlob ` 21 | -Container $containerName ` 22 | -blob * ` 23 | -Context $blobContext | ForEach-Object {Remove-AzureStorageBlob -Blob $_.Name -Container $containerName -Context $blobContext} 24 | 25 | Write-Host "----------------------------------------" 26 | 27 | Sleep -Seconds 5 28 | 29 | #upload files 30 | Write-Host "Uploading files." 31 | 32 | foreach ($file in $files) 33 | { 34 | $fileName = "$projectFolder\$file" 35 | $blobName = "$destfolder/$file" 36 | 37 | Write-Host "Uploading" $file 38 | 39 | Set-AzureStorageBlobContent ` 40 | -File $filename ` 41 | -Container $containerName ` 42 | -Blob $file ` 43 | -Context $blobContext ` 44 | -Force | Out-Null 45 | } 46 | 47 | Write-Host "----------------------------------------" 48 | Write-Host "Upload complete." 
49 | -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/DesignTimeBuild/.dtbcache: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/DesignTimeBuild/.dtbcache -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/v16/.suo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/v16/.suo -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/v16/Server/sqlite3/db.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/v16/Server/sqlite3/db.lock -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/v16/Server/sqlite3/storage.ide: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Execute Any Azure Data Factory Pipeline with an Azure Function/.vs/PipelineExecutor/v16/Server/sqlite3/storage.ide 
-------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/PipelineExecutor.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 16 4 | VisualStudioVersion = 16.0.29728.190 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PipelineExecutor", "PipelineExecutor\PipelineExecutor.csproj", "{491422B7-393B-4421-BE9E-5B70E7746CC1}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {491422B7-393B-4421-BE9E-5B70E7746CC1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {491422B7-393B-4421-BE9E-5B70E7746CC1}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {491422B7-393B-4421-BE9E-5B70E7746CC1}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {491422B7-393B-4421-BE9E-5B70E7746CC1}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {170F5302-BB9C-4569-8F43-D859C7678EF0} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/PipelineExecutor/.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | 4 | # Azure Functions localsettings file 5 | local.settings.json 6 | 7 | # User-specific files 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Build results 17 | [Dd]ebug/ 18 | [Dd]ebugPublic/ 19 | [Rr]elease/ 20 | [Rr]eleases/ 21 | x64/ 22 | x86/ 23 | bld/ 24 | [Bb]in/ 25 | [Oo]bj/ 26 | [Ll]og/ 27 | 28 | # Visual Studio 2015 cache/options directory 29 | .vs/ 30 | # Uncomment if you have tasks that create the project's static files in wwwroot 31 | #wwwroot/ 32 | 33 | # MSTest test Results 34 | [Tt]est[Rr]esult*/ 35 | [Bb]uild[Ll]og.* 36 | 37 | # NUNIT 38 | *.VisualState.xml 39 | TestResult.xml 40 | 41 | # Build Results of an ATL Project 42 | [Dd]ebugPS/ 43 | [Rr]eleasePS/ 44 | dlldata.c 45 | 46 | # DNX 47 | project.lock.json 48 | project.fragment.lock.json 49 | artifacts/ 50 | 51 | *_i.c 52 | *_p.c 53 | *_i.h 54 | *.ilk 55 | *.meta 56 | *.obj 57 | *.pch 58 | *.pdb 59 | *.pgc 60 | *.pgd 61 | *.rsp 62 | *.sbr 63 | *.tlb 64 | *.tli 65 | *.tlh 66 | *.tmp 67 | *.tmp_proj 68 | *.log 69 | *.vspscc 70 | *.vssscc 71 | .builds 72 | *.pidb 73 | *.svclog 74 | *.scc 75 | 76 | # Chutzpah Test files 77 | _Chutzpah* 78 | 79 | # Visual C++ cache files 80 | ipch/ 81 | *.aps 82 | *.ncb 83 | *.opendb 84 | *.opensdf 85 | *.sdf 86 | *.cachefile 87 | *.VC.db 88 | *.VC.VC.opendb 89 | 90 | # Visual Studio profiler 91 | *.psess 92 | *.vsp 93 | *.vspx 94 | *.sap 95 | 96 | # TFS 2012 Local Workspace 97 | $tf/ 98 | 99 | # Guidance Automation Toolkit 100 | *.gpState 101 | 102 | # ReSharper is a .NET coding add-in 103 | _ReSharper*/ 104 | *.[Rr]e[Ss]harper 105 | *.DotSettings.user 106 | 107 | # JustCode is a .NET coding add-in 108 | .JustCode 109 | 110 | # TeamCity is a build add-in 111 | _TeamCity* 112 | 113 | # DotCover is a Code Coverage Tool 114 | *.dotCover 115 | 116 | # NCrunch 117 | _NCrunch_* 118 | .*crunch*.local.xml 119 | nCrunchTemp_* 120 | 121 | # MightyMoose 122 | *.mm.* 123 | 
AutoTest.Net/ 124 | 125 | # Web workbench (sass) 126 | .sass-cache/ 127 | 128 | # Installshield output folder 129 | [Ee]xpress/ 130 | 131 | # DocProject is a documentation generator add-in 132 | DocProject/buildhelp/ 133 | DocProject/Help/*.HxT 134 | DocProject/Help/*.HxC 135 | DocProject/Help/*.hhc 136 | DocProject/Help/*.hhk 137 | DocProject/Help/*.hhp 138 | DocProject/Help/Html2 139 | DocProject/Help/html 140 | 141 | # Click-Once directory 142 | publish/ 143 | 144 | # Publish Web Output 145 | *.[Pp]ublish.xml 146 | *.azurePubxml 147 | # TODO: Comment the next line if you want to checkin your web deploy settings 148 | # but database connection strings (with potential passwords) will be unencrypted 149 | #*.pubxml 150 | *.publishproj 151 | 152 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 153 | # checkin your Azure Web App publish settings, but sensitive information contained 154 | # in these scripts will be unencrypted 155 | PublishScripts/ 156 | 157 | # NuGet Packages 158 | *.nupkg 159 | # The packages folder can be ignored because of Package Restore 160 | **/packages/* 161 | # except build/, which is used as an MSBuild target. 
162 | !**/packages/build/ 163 | # Uncomment if necessary however generally it will be regenerated when needed 164 | #!**/packages/repositories.config 165 | # NuGet v3's project.json files produces more ignoreable files 166 | *.nuget.props 167 | *.nuget.targets 168 | 169 | # Microsoft Azure Build Output 170 | csx/ 171 | *.build.csdef 172 | 173 | # Microsoft Azure Emulator 174 | ecf/ 175 | rcf/ 176 | 177 | # Windows Store app package directories and files 178 | AppPackages/ 179 | BundleArtifacts/ 180 | Package.StoreAssociation.xml 181 | _pkginfo.txt 182 | 183 | # Visual Studio cache files 184 | # files ending in .cache can be ignored 185 | *.[Cc]ache 186 | # but keep track of directories ending in .cache 187 | !*.[Cc]ache/ 188 | 189 | # Others 190 | ClientBin/ 191 | ~$* 192 | *~ 193 | *.dbmdl 194 | *.dbproj.schemaview 195 | *.jfm 196 | *.pfx 197 | *.publishsettings 198 | node_modules/ 199 | orleans.codegen.cs 200 | 201 | # Since there are multiple workflows, uncomment next line to ignore bower_components 202 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 203 | #bower_components/ 204 | 205 | # RIA/Silverlight projects 206 | Generated_Code/ 207 | 208 | # Backup & report files from converting an old project file 209 | # to a newer Visual Studio version. 
Backup files are not needed, 210 | # because we have git ;-) 211 | _UpgradeReport_Files/ 212 | Backup*/ 213 | UpgradeLog*.XML 214 | UpgradeLog*.htm 215 | 216 | # SQL Server files 217 | *.mdf 218 | *.ldf 219 | 220 | # Business Intelligence projects 221 | *.rdl.data 222 | *.bim.layout 223 | *.bim_*.settings 224 | 225 | # Microsoft Fakes 226 | FakesAssemblies/ 227 | 228 | # GhostDoc plugin setting file 229 | *.GhostDoc.xml 230 | 231 | # Node.js Tools for Visual Studio 232 | .ntvs_analysis.dat 233 | 234 | # Visual Studio 6 build log 235 | *.plg 236 | 237 | # Visual Studio 6 workspace options file 238 | *.opt 239 | 240 | # Visual Studio LightSwitch build output 241 | **/*.HTMLClient/GeneratedArtifacts 242 | **/*.DesktopClient/GeneratedArtifacts 243 | **/*.DesktopClient/ModelManifest.xml 244 | **/*.Server/GeneratedArtifacts 245 | **/*.Server/ModelManifest.xml 246 | _Pvt_Extensions 247 | 248 | # Paket dependency manager 249 | .paket/paket.exe 250 | paket-files/ 251 | 252 | # FAKE - F# Make 253 | .fake/ 254 | 255 | # JetBrains Rider 256 | .idea/ 257 | *.sln.iml 258 | 259 | # CodeRush 260 | .cr/ 261 | 262 | # Python Tools for Visual Studio (PTVS) 263 | __pycache__/ 264 | *.pyc -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/PipelineExecutor/ExecutePipeline.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.IO; 3 | using System.Threading.Tasks; 4 | using Microsoft.AspNetCore.Mvc; 5 | using Microsoft.Azure.WebJobs; 6 | using Microsoft.Azure.WebJobs.Extensions.Http; 7 | using Microsoft.AspNetCore.Http; 8 | using Microsoft.Extensions.Logging; 9 | using Newtonsoft.Json; 10 | using Microsoft.IdentityModel.Clients.ActiveDirectory; 11 | using Microsoft.Rest; 12 | using Microsoft.Azure.Management.DataFactory; 13 | using Microsoft.Azure.Management.DataFactory.Models; 14 | using System.Linq; 15 | using 
System.Collections.Generic; 16 | using Newtonsoft.Json.Linq; 17 | using System.Net.Http; 18 | 19 | namespace PipelineExecutor 20 | { 21 | public static class ExecutePipeline 22 | { 23 | [FunctionName("ExecutePipeline")] 24 | public static async Task Run( 25 | [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req, 26 | ILogger log) 27 | { 28 | log.LogInformation("C# HTTP trigger function processed a request."); 29 | 30 | string requestBody = await new StreamReader(req.Body).ReadToEndAsync(); 31 | dynamic data = JsonConvert.DeserializeObject(requestBody); 32 | 33 | string tenantId = data?.tenantId; 34 | string applicationId = data?.applicationId; 35 | string authenticationKey = data?.authenticationKey; 36 | string subscriptionId = data?.subscriptionId; 37 | string resourceGroup = data?.resourceGroup; 38 | string factoryName = data?.factoryName; 39 | string pipelineName = data?.pipelineName; 40 | 41 | //Check body for values 42 | if ( 43 | tenantId == null || 44 | applicationId == null || 45 | authenticationKey == null || 46 | subscriptionId == null || 47 | factoryName == null || 48 | pipelineName == null 49 | ) 50 | { 51 | return new BadRequestObjectResult("Invalid request body, value missing."); 52 | } 53 | 54 | //Create a data factory management client 55 | var context = new AuthenticationContext("https://login.windows.net/" + tenantId); 56 | ClientCredential cc = new ClientCredential(applicationId, authenticationKey); 57 | AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result; 58 | ServiceClientCredentials cred = new TokenCredentials(result.AccessToken); 59 | var client = new DataFactoryManagementClient(cred) 60 | { 61 | SubscriptionId = subscriptionId 62 | }; 63 | 64 | //Run pipeline 65 | CreateRunResponse runResponse; 66 | PipelineRun pipelineRun; 67 | 68 | if (data?.pipelineParameters == null) 69 | { 70 | log.LogInformation("Called pipeline without parameters."); 71 | 72 | 
runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync( 73 | resourceGroup, factoryName, pipelineName).Result.Body; 74 | } 75 | else 76 | { 77 | log.LogInformation("Called pipeline with parameters."); 78 | 79 | JObject jObj = JObject.Parse(requestBody); 80 | Dictionary parameters = jObj["pipelineParameters"].ToObject>(); 81 | 82 | log.LogInformation("Number of parameters provided: " + jObj.Count.ToString()); 83 | 84 | runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync( 85 | resourceGroup, factoryName, pipelineName, parameters: parameters).Result.Body; 86 | } 87 | 88 | log.LogInformation("Pipeline run ID: " + runResponse.RunId); 89 | 90 | //Wait and check for pipeline result 91 | log.LogInformation("Checking pipeline run status..."); 92 | while (true) 93 | { 94 | pipelineRun = client.PipelineRuns.Get( 95 | resourceGroup, factoryName, runResponse.RunId); 96 | 97 | log.LogInformation("Status: " + pipelineRun.Status); 98 | 99 | if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued") 100 | System.Threading.Thread.Sleep(15000); 101 | else 102 | break; 103 | } 104 | 105 | //Final return detail 106 | string outputString = "{ \"PipelineName\": \"" + pipelineName + "\", \"RunIdUsed\": \"" + pipelineRun.RunId + "\", \"Status\": \"" + pipelineRun.Status + "\" }"; 107 | JObject outputJson = JObject.Parse(outputString); 108 | return new OkObjectResult(outputJson); 109 | } 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/PipelineExecutor/PipelineExecutor.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | netcoreapp3.0 4 | v3 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | PreserveNewest 15 | 16 | 17 | PreserveNewest 18 | Never 19 | 20 | 21 | -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure 
Function/PipelineExecutor/Properties/PublishProfiles/PipelineExecutor - Zip Deploy.pubxml: -------------------------------------------------------------------------------- 1 |  2 | 5 | 6 | 7 | ZipDeploy 8 | AzureWebSite 9 | Release 10 | Any CPU 11 | https://pipelineexecutor.azurewebsites.net 12 | False 13 | /subscriptions/77a3e40b-dd36-433c-90ca-a09f39724af3/resourcegroups/CommunityDemos/providers/Microsoft.Web/sites/PipelineExecutor 14 | $PipelineExecutor 15 | <_SavePWD>True 16 | https://pipelineexecutor.scm.azurewebsites.net/ 17 | 18 | -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/PipelineExecutor/host.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0" 3 | } -------------------------------------------------------------------------------- /Execute Any Azure Data Factory Pipeline with an Azure Function/Visual.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Execute Any Azure Data Factory Pipeline with an Azure Function/Visual.pptx -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Compute Pools.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Compute Pools.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Home.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Home.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Notebook Attached and Language.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Notebook Attached and Language.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Spark.Net Notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Spark.Net Notebook.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Storage Explorer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Storage Explorer.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Storage New Notebook Result.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Storage New Notebook Result.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Storage New Notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synapse Storage New Notebook.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synaspe Pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Synaspe Pipeline.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Visual Studio SparkNet NuGet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/First Time Playing with Spark.Net on Azure Synapse Analytics/Images/Visual Studio SparkNet NuGet.png -------------------------------------------------------------------------------- /First Time Playing with Spark.Net on Azure Synapse Analytics/PaulsNotebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "saveOutput": true, 4 | "language_info": { 5 | "name": "csharp" 6 | } 7 | }, 8 | "nbformat": 4, 9 | 
"nbformat_minor": 2, 10 | "cells": [ 11 | { 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "## Paul Andrew Plaground\n", 16 | "\n", 17 | "[paul@mrpaulandrew.com](mailto:paul@mrpaulandrew.com)\n", 18 | "\n", 19 | "Playing around with the Spark.Net basics, using the examples from the Spark.Net tutorial here: \n", 20 | "[https://dotnet.microsoft.com/](https://dotnet.microsoft.com/learn/data/spark-tutorial/intro)\n", 21 | "" 22 | ], 23 | "attachments": {} 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": null, 28 | "outputs": [], 29 | "metadata": {}, 30 | "source": [ 31 | "using Microsoft.Spark.Sql;\n", 32 | "\n", 33 | "string inputPath = \"abfss://paulplayground@mvpsynapsestorage.dfs.core.windows.net/mySparkApp/input.txt\";\n", 34 | "\n", 35 | "DataFrame rawData = spark.Read().Text(inputPath);" 36 | ], 37 | "attachments": {} 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "outputs": [], 43 | "metadata": {}, 44 | "source": [ 45 | "rawData.Show()" 46 | ], 47 | "attachments": {} 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "outputs": [], 53 | "metadata": {}, 54 | "source": [ 55 | "rawData.PrintSchema();" 56 | ], 57 | "attachments": {} 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "outputs": [], 63 | "metadata": {}, 64 | "source": [ 65 | "DataFrame words = rawData\n", 66 | " .Select(Functions.Split(Functions.Col(\"value\"), \" \").Alias(\"words\"))\n", 67 | " .Select(Functions.Explode(Functions.Col(\"words\"))\n", 68 | " .Alias(\"word\"))\n", 69 | " .GroupBy(\"word\")\n", 70 | " .Count()\n", 71 | " .OrderBy(Functions.Col(\"count\").Desc());\n", 72 | "\n", 73 | "words.Show();" 74 | ], 75 | "attachments": {} 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "outputs": [], 81 | "metadata": {}, 82 | "source": [ 83 | "words.CreateOrReplaceTempView(\"wordView\");\n", 84 | "\n", 85 | "DataFrame sqlDf = spark.Sql(\"SELECT * 
FROM wordView WHERE count > 1\");\n", 86 | "sqlDf.Show();" 87 | ], 88 | "attachments": {} 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": null, 93 | "outputs": [], 94 | "metadata": {}, 95 | "source": [ 96 | "string outputPath = \"abfss://paulplayground@mvpsynapsestorage.dfs.core.windows.net/mySparkApp/wordsSummary.parquet\";\n", 97 | "\n", 98 | "words.Write().Mode(SaveMode.Overwrite).Parquet(outputPath);" 99 | ], 100 | "attachments": {} 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "outputs": [], 106 | "metadata": {}, 107 | "source": [ 108 | "spark.Stop();" 109 | ], 110 | "attachments": {} 111 | } 112 | ] 113 | } -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/.vs/Get Error Details/DesignTimeBuild/.dtbcache.v2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/.vs/Get Error Details/DesignTimeBuild/.dtbcache.v2 -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/.vs/Get Error Details/v16/.suo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/.vs/Get Error Details/v16/.suo -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/Get Error Details.sln: 
-------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 16 4 | VisualStudioVersion = 16.0.30011.22 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Get Error Details", "Get Error Details\Get Error Details.csproj", "{542CAFB0-4F39-4CF9-8A34-3665836B3FB7}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {542CAFB0-4F39-4CF9-8A34-3665836B3FB7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {542CAFB0-4F39-4CF9-8A34-3665836B3FB7}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {542CAFB0-4F39-4CF9-8A34-3665836B3FB7}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {542CAFB0-4F39-4CF9-8A34-3665836B3FB7}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {56D8166E-6CB1-45CE-A7BF-BF6721CEFCC0} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/Get Error Details/.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | 4 | # Azure Functions localsettings file 5 | local.settings.json 6 | 7 | # User-specific files 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Build results 17 | [Dd]ebug/ 18 | [Dd]ebugPublic/ 19 | [Rr]elease/ 20 | [Rr]eleases/ 21 | x64/ 22 | x86/ 23 | bld/ 24 | [Bb]in/ 25 | [Oo]bj/ 26 | [Ll]og/ 27 | 28 | # Visual Studio 2015 cache/options directory 29 | .vs/ 30 | # Uncomment if you have tasks that create the project's static files in wwwroot 31 | #wwwroot/ 32 | 33 | # MSTest test Results 34 | [Tt]est[Rr]esult*/ 35 | [Bb]uild[Ll]og.* 36 | 37 | # NUNIT 38 | *.VisualState.xml 39 | TestResult.xml 40 | 41 | # Build Results of an ATL Project 42 | [Dd]ebugPS/ 43 | [Rr]eleasePS/ 44 | dlldata.c 45 | 46 | # DNX 47 | project.lock.json 48 | project.fragment.lock.json 49 | artifacts/ 50 | 51 | *_i.c 52 | *_p.c 53 | *_i.h 54 | *.ilk 55 | *.meta 56 | *.obj 57 | *.pch 58 | *.pdb 59 | *.pgc 60 | *.pgd 61 | *.rsp 62 | *.sbr 63 | *.tlb 64 | *.tli 65 | *.tlh 66 | *.tmp 67 | *.tmp_proj 68 | *.log 69 | *.vspscc 70 | *.vssscc 71 | .builds 72 | *.pidb 73 | *.svclog 74 | *.scc 75 | 76 | # Chutzpah Test files 77 | _Chutzpah* 78 | 79 | # Visual C++ cache files 80 | ipch/ 81 | *.aps 82 | *.ncb 83 | *.opendb 84 | *.opensdf 85 | *.sdf 86 | *.cachefile 87 | *.VC.db 88 | *.VC.VC.opendb 89 | 90 | # Visual Studio profiler 91 | *.psess 92 | *.vsp 93 | *.vspx 94 | *.sap 95 | 96 | # TFS 2012 Local Workspace 97 | $tf/ 98 | 99 | # Guidance Automation Toolkit 100 | *.gpState 101 | 102 | # ReSharper is a .NET coding add-in 103 | _ReSharper*/ 104 | *.[Rr]e[Ss]harper 105 | *.DotSettings.user 106 | 107 | # JustCode is a .NET coding add-in 108 | .JustCode 109 | 110 | # TeamCity is a build add-in 111 | _TeamCity* 112 | 113 | # DotCover is a Code Coverage Tool 114 | *.dotCover 115 | 116 | # NCrunch 117 | _NCrunch_* 118 | .*crunch*.local.xml 119 | nCrunchTemp_* 120 | 121 | # MightyMoose 122 | *.mm.* 123 | 
AutoTest.Net/ 124 | 125 | # Web workbench (sass) 126 | .sass-cache/ 127 | 128 | # Installshield output folder 129 | [Ee]xpress/ 130 | 131 | # DocProject is a documentation generator add-in 132 | DocProject/buildhelp/ 133 | DocProject/Help/*.HxT 134 | DocProject/Help/*.HxC 135 | DocProject/Help/*.hhc 136 | DocProject/Help/*.hhk 137 | DocProject/Help/*.hhp 138 | DocProject/Help/Html2 139 | DocProject/Help/html 140 | 141 | # Click-Once directory 142 | publish/ 143 | 144 | # Publish Web Output 145 | *.[Pp]ublish.xml 146 | *.azurePubxml 147 | # TODO: Comment the next line if you want to checkin your web deploy settings 148 | # but database connection strings (with potential passwords) will be unencrypted 149 | #*.pubxml 150 | *.publishproj 151 | 152 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 153 | # checkin your Azure Web App publish settings, but sensitive information contained 154 | # in these scripts will be unencrypted 155 | PublishScripts/ 156 | 157 | # NuGet Packages 158 | *.nupkg 159 | # The packages folder can be ignored because of Package Restore 160 | **/packages/* 161 | # except build/, which is used as an MSBuild target. 
162 | !**/packages/build/ 163 | # Uncomment if necessary however generally it will be regenerated when needed 164 | #!**/packages/repositories.config 165 | # NuGet v3's project.json files produces more ignoreable files 166 | *.nuget.props 167 | *.nuget.targets 168 | 169 | # Microsoft Azure Build Output 170 | csx/ 171 | *.build.csdef 172 | 173 | # Microsoft Azure Emulator 174 | ecf/ 175 | rcf/ 176 | 177 | # Windows Store app package directories and files 178 | AppPackages/ 179 | BundleArtifacts/ 180 | Package.StoreAssociation.xml 181 | _pkginfo.txt 182 | 183 | # Visual Studio cache files 184 | # files ending in .cache can be ignored 185 | *.[Cc]ache 186 | # but keep track of directories ending in .cache 187 | !*.[Cc]ache/ 188 | 189 | # Others 190 | ClientBin/ 191 | ~$* 192 | *~ 193 | *.dbmdl 194 | *.dbproj.schemaview 195 | *.jfm 196 | *.pfx 197 | *.publishsettings 198 | node_modules/ 199 | orleans.codegen.cs 200 | 201 | # Since there are multiple workflows, uncomment next line to ignore bower_components 202 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 203 | #bower_components/ 204 | 205 | # RIA/Silverlight projects 206 | Generated_Code/ 207 | 208 | # Backup & report files from converting an old project file 209 | # to a newer Visual Studio version. 
Backup files are not needed, 210 | # because we have git ;-) 211 | _UpgradeReport_Files/ 212 | Backup*/ 213 | UpgradeLog*.XML 214 | UpgradeLog*.htm 215 | 216 | # SQL Server files 217 | *.mdf 218 | *.ldf 219 | 220 | # Business Intelligence projects 221 | *.rdl.data 222 | *.bim.layout 223 | *.bim_*.settings 224 | 225 | # Microsoft Fakes 226 | FakesAssemblies/ 227 | 228 | # GhostDoc plugin setting file 229 | *.GhostDoc.xml 230 | 231 | # Node.js Tools for Visual Studio 232 | .ntvs_analysis.dat 233 | 234 | # Visual Studio 6 build log 235 | *.plg 236 | 237 | # Visual Studio 6 workspace options file 238 | *.opt 239 | 240 | # Visual Studio LightSwitch build output 241 | **/*.HTMLClient/GeneratedArtifacts 242 | **/*.DesktopClient/GeneratedArtifacts 243 | **/*.DesktopClient/ModelManifest.xml 244 | **/*.Server/GeneratedArtifacts 245 | **/*.Server/ModelManifest.xml 246 | _Pvt_Extensions 247 | 248 | # Paket dependency manager 249 | .paket/paket.exe 250 | paket-files/ 251 | 252 | # FAKE - F# Make 253 | .fake/ 254 | 255 | # JetBrains Rider 256 | .idea/ 257 | *.sln.iml 258 | 259 | # CodeRush 260 | .cr/ 261 | 262 | # Python Tools for Visual Studio (PTVS) 263 | __pycache__/ 264 | *.pyc -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/Get Error Details/Get Error Details.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | netcoreapp3.1 4 | v3 5 | Get_Error_Details 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | PreserveNewest 17 | 18 | 19 | PreserveNewest 20 | Never 21 | 22 | 23 | -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/Get Error Details/GetActivityErrorDetails.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 
using System.IO; 3 | using System.Threading.Tasks; 4 | using Microsoft.AspNetCore.Mvc; 5 | using Microsoft.Azure.WebJobs; 6 | using Microsoft.Azure.WebJobs.Extensions.Http; 7 | using Microsoft.AspNetCore.Http; 8 | using Microsoft.Extensions.Logging; 9 | using Newtonsoft.Json; 10 | using Newtonsoft.Json.Linq; 11 | using Microsoft.IdentityModel.Clients.ActiveDirectory; 12 | using Microsoft.Azure.Management.DataFactory; 13 | using Microsoft.Rest; 14 | using Microsoft.Azure.Management.DataFactory.Models; 15 | 16 | namespace GetErrorDetails 17 | { 18 | public static class GetActivityErrorDetails 19 | { 20 | [FunctionName("GetActivityErrorDetails")] 21 | public static async Task Run( 22 | [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req, 23 | ILogger log) 24 | { 25 | log.LogInformation("C# HTTP trigger function processed a request."); 26 | 27 | string requestBody = await new StreamReader(req.Body).ReadToEndAsync(); 28 | dynamic inputData = JsonConvert.DeserializeObject(requestBody); 29 | 30 | string tenantId = inputData?.tenantId; 31 | string applicationId = inputData?.applicationId; 32 | string authenticationKey = inputData?.authenticationKey; 33 | string subscriptionId = inputData?.subscriptionId; 34 | string resourceGroup = inputData?.resourceGroup; 35 | string factoryName = inputData?.factoryName; 36 | string pipelineName = inputData?.pipelineName; 37 | string runId = inputData?.runId; 38 | 39 | //Check body for values 40 | if ( 41 | tenantId == null || 42 | applicationId == null || 43 | authenticationKey == null || 44 | subscriptionId == null || 45 | resourceGroup == null || 46 | factoryName == null || 47 | pipelineName == null || 48 | runId == null 49 | ) 50 | { 51 | return new BadRequestObjectResult("Invalid request body, value missing."); 52 | } 53 | 54 | //Create a data factory management client 55 | var context = new AuthenticationContext("https://login.windows.net/" + tenantId); 56 | ClientCredential cc = new 
ClientCredential(applicationId, authenticationKey); 57 | AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result; 58 | ServiceClientCredentials cred = new TokenCredentials(result.AccessToken); 59 | var client = new DataFactoryManagementClient(cred) 60 | { 61 | SubscriptionId = subscriptionId 62 | }; 63 | 64 | //Get pipeline details 65 | int daysOfRuns = 7; //max duration for mandatory RunFilterParameters 66 | DateTime today = DateTime.Now; 67 | DateTime lastWeek = DateTime.Now.AddDays(-daysOfRuns); 68 | 69 | PipelineRun pipelineRun; 70 | pipelineRun = client.PipelineRuns.Get(resourceGroup, factoryName, runId); 71 | 72 | RunFilterParameters filterParams = new RunFilterParameters(lastWeek, today); 73 | ActivityRunsQueryResponse queryResponse = client.ActivityRuns.QueryByPipelineRun( 74 | resourceGroup, factoryName, runId, filterParams); 75 | 76 | //Create initial output content 77 | dynamic outputValues = new JObject(); 78 | 79 | outputValues.PipelineName = pipelineName; 80 | outputValues.PipelineStatus = pipelineRun.Status; 81 | outputValues.RunId = runId; 82 | outputValues.ResponseCount = queryResponse.Value.Count; 83 | outputValues.ResponseErrorCount = 0; 84 | outputValues.Errors = new JArray(); 85 | JObject errorDetails; 86 | 87 | log.LogInformation("Pipeline status: " + pipelineRun.Status); 88 | log.LogInformation("Activities found in pipeline response: " + queryResponse.Value.Count.ToString()); 89 | 90 | //Loop over activities in pipeline run 91 | foreach (var activity in queryResponse.Value) 92 | { 93 | if (String.IsNullOrEmpty(activity.Error.ToString())) 94 | { 95 | continue; //just incase 96 | } 97 | 98 | //Parse error output to customise output 99 | dynamic outputData = JsonConvert.DeserializeObject(activity.Error.ToString()); 100 | 101 | string errorCode = outputData?.errorCode; 102 | string errorType = outputData?.failureType; 103 | string errorMessage = outputData?.message; 104 | 105 | //Get output details 106 
| if (!String.IsNullOrEmpty(errorCode)) 107 | { 108 | log.LogInformation("Activity name: " + activity.ActivityName); 109 | log.LogInformation("Activity type: " + activity.ActivityType); 110 | log.LogInformation("Error message: " + errorMessage); 111 | 112 | outputValues.ResponseErrorCount += 1; 113 | 114 | //Construct custom error information block 115 | errorDetails = JObject.Parse("{ \"ActivityName\": \"" + activity.ActivityName + 116 | "\", \"ActivityType\": \"" + activity.ActivityType + 117 | "\", \"ErrorCode\": \"" + errorCode + 118 | "\", \"ErrorType\": \"" + errorType + 119 | "\", \"ErrorMessage\": \"" + errorMessage + 120 | "\" }"); 121 | 122 | outputValues.Errors.Add(errorDetails); 123 | } 124 | } 125 | return new OkObjectResult(outputValues); 126 | } 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/Get Error Details/JavaScriptSerializer.cs: -------------------------------------------------------------------------------- 1 | namespace GetErrorDetails 2 | { 3 | internal class JavaScriptSerializer 4 | { 5 | } 6 | } -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Activity Error Details with Azure Functions/Get Error Details/Get Error Details/host.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "logging": { 4 | "applicationInsights": { 5 | "samplingExcludedTypes": "Request", 6 | "samplingSettings": { 7 | "isEnabled": true 8 | } 9 | } 10 | } 11 | } -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/Filter Factory Code Snippet.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/Filter Factory Code Snippet.png -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/DesignTimeBuild/.dtbcache: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/DesignTimeBuild/.dtbcache -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/DesignTimeBuild/.dtbcache.v2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/DesignTimeBuild/.dtbcache.v2 -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/.suo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/.suo -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure 
Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/Server/sqlite3/db.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/Server/sqlite3/db.lock -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/Server/sqlite3/storage.ide: -------------------------------------------------------------------------------- 1 | SQLite format 3@ .A  -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/Server/sqlite3/storage.ide-shm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/Server/sqlite3/storage.ide-shm -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/Server/sqlite3/storage.ide-wal: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v15/Server/sqlite3/storage.ide-wal -------------------------------------------------------------------------------- /Get Any 
Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v16/.suo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v16/.suo -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v16/Server/sqlite3/db.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v16/Server/sqlite3/db.lock -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v16/Server/sqlite3/storage.ide: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/.vs/PipelineStatusChecker/v16/Server/sqlite3/storage.ide -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/PipelineStatusChecker.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 16 4 | VisualStudioVersion = 16.0.29215.179 5 | 
MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PipelineStatusChecker", "PipelineStatusChecker\PipelineStatusChecker.csproj", "{4174590C-515C-4C67-9114-734BC78AAE33}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {4174590C-515C-4C67-9114-734BC78AAE33}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {4174590C-515C-4C67-9114-734BC78AAE33}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {4174590C-515C-4C67-9114-734BC78AAE33}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {4174590C-515C-4C67-9114-734BC78AAE33}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {35AEC8A9-E6C5-4CE8-A302-63600D414967} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/PipelineStatusChecker/.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | 4 | # Azure Functions localsettings file 5 | local.settings.json 6 | 7 | # User-specific files 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Build results 17 | [Dd]ebug/ 18 | [Dd]ebugPublic/ 19 | [Rr]elease/ 20 | [Rr]eleases/ 21 | x64/ 22 | x86/ 23 | bld/ 24 | [Bb]in/ 25 | [Oo]bj/ 26 | [Ll]og/ 27 | 28 | # Visual Studio 2015 cache/options directory 29 | .vs/ 30 | # Uncomment if you have tasks that create the project's static files in wwwroot 31 | #wwwroot/ 32 | 33 | # MSTest test Results 34 | [Tt]est[Rr]esult*/ 35 | [Bb]uild[Ll]og.* 36 | 37 | # NUNIT 38 | *.VisualState.xml 39 | TestResult.xml 40 | 41 | # Build Results of an ATL Project 42 | [Dd]ebugPS/ 43 | [Rr]eleasePS/ 44 | dlldata.c 45 | 46 | # DNX 47 | project.lock.json 48 | project.fragment.lock.json 49 | artifacts/ 50 | 51 | *_i.c 52 | *_p.c 53 | *_i.h 54 | *.ilk 55 | *.meta 56 | *.obj 57 | *.pch 58 | *.pdb 59 | *.pgc 60 | *.pgd 61 | *.rsp 62 | *.sbr 63 | *.tlb 64 | *.tli 65 | *.tlh 66 | *.tmp 67 | *.tmp_proj 68 | *.log 69 | *.vspscc 70 | *.vssscc 71 | .builds 72 | *.pidb 73 | *.svclog 74 | *.scc 75 | 76 | # Chutzpah Test files 77 | _Chutzpah* 78 | 79 | # Visual C++ cache files 80 | ipch/ 81 | *.aps 82 | *.ncb 83 | *.opendb 84 | *.opensdf 85 | *.sdf 86 | *.cachefile 87 | *.VC.db 88 | *.VC.VC.opendb 89 | 90 | # Visual Studio profiler 91 | *.psess 92 | *.vsp 93 | *.vspx 94 | *.sap 95 | 96 | # TFS 2012 Local Workspace 97 | $tf/ 98 | 99 | # Guidance Automation Toolkit 100 | *.gpState 101 | 102 | # ReSharper is a .NET coding add-in 103 | _ReSharper*/ 104 | *.[Rr]e[Ss]harper 105 | *.DotSettings.user 106 | 107 | # JustCode is a .NET coding add-in 108 | .JustCode 109 | 110 | # TeamCity is a build add-in 111 | _TeamCity* 112 | 113 | # DotCover is a Code Coverage Tool 114 | *.dotCover 115 | 116 | # NCrunch 117 | _NCrunch_* 118 | .*crunch*.local.xml 119 | nCrunchTemp_* 120 | 121 | # MightyMoose 122 | *.mm.* 123 | 
AutoTest.Net/ 124 | 125 | # Web workbench (sass) 126 | .sass-cache/ 127 | 128 | # Installshield output folder 129 | [Ee]xpress/ 130 | 131 | # DocProject is a documentation generator add-in 132 | DocProject/buildhelp/ 133 | DocProject/Help/*.HxT 134 | DocProject/Help/*.HxC 135 | DocProject/Help/*.hhc 136 | DocProject/Help/*.hhk 137 | DocProject/Help/*.hhp 138 | DocProject/Help/Html2 139 | DocProject/Help/html 140 | 141 | # Click-Once directory 142 | publish/ 143 | 144 | # Publish Web Output 145 | *.[Pp]ublish.xml 146 | *.azurePubxml 147 | # TODO: Comment the next line if you want to checkin your web deploy settings 148 | # but database connection strings (with potential passwords) will be unencrypted 149 | #*.pubxml 150 | *.publishproj 151 | 152 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 153 | # checkin your Azure Web App publish settings, but sensitive information contained 154 | # in these scripts will be unencrypted 155 | PublishScripts/ 156 | 157 | # NuGet Packages 158 | *.nupkg 159 | # The packages folder can be ignored because of Package Restore 160 | **/packages/* 161 | # except build/, which is used as an MSBuild target. 
162 | !**/packages/build/ 163 | # Uncomment if necessary however generally it will be regenerated when needed 164 | #!**/packages/repositories.config 165 | # NuGet v3's project.json files produces more ignoreable files 166 | *.nuget.props 167 | *.nuget.targets 168 | 169 | # Microsoft Azure Build Output 170 | csx/ 171 | *.build.csdef 172 | 173 | # Microsoft Azure Emulator 174 | ecf/ 175 | rcf/ 176 | 177 | # Windows Store app package directories and files 178 | AppPackages/ 179 | BundleArtifacts/ 180 | Package.StoreAssociation.xml 181 | _pkginfo.txt 182 | 183 | # Visual Studio cache files 184 | # files ending in .cache can be ignored 185 | *.[Cc]ache 186 | # but keep track of directories ending in .cache 187 | !*.[Cc]ache/ 188 | 189 | # Others 190 | ClientBin/ 191 | ~$* 192 | *~ 193 | *.dbmdl 194 | *.dbproj.schemaview 195 | *.jfm 196 | *.pfx 197 | *.publishsettings 198 | node_modules/ 199 | orleans.codegen.cs 200 | 201 | # Since there are multiple workflows, uncomment next line to ignore bower_components 202 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 203 | #bower_components/ 204 | 205 | # RIA/Silverlight projects 206 | Generated_Code/ 207 | 208 | # Backup & report files from converting an old project file 209 | # to a newer Visual Studio version. 
Backup files are not needed, 210 | # because we have git ;-) 211 | _UpgradeReport_Files/ 212 | Backup*/ 213 | UpgradeLog*.XML 214 | UpgradeLog*.htm 215 | 216 | # SQL Server files 217 | *.mdf 218 | *.ldf 219 | 220 | # Business Intelligence projects 221 | *.rdl.data 222 | *.bim.layout 223 | *.bim_*.settings 224 | 225 | # Microsoft Fakes 226 | FakesAssemblies/ 227 | 228 | # GhostDoc plugin setting file 229 | *.GhostDoc.xml 230 | 231 | # Node.js Tools for Visual Studio 232 | .ntvs_analysis.dat 233 | 234 | # Visual Studio 6 build log 235 | *.plg 236 | 237 | # Visual Studio 6 workspace options file 238 | *.opt 239 | 240 | # Visual Studio LightSwitch build output 241 | **/*.HTMLClient/GeneratedArtifacts 242 | **/*.DesktopClient/GeneratedArtifacts 243 | **/*.DesktopClient/ModelManifest.xml 244 | **/*.Server/GeneratedArtifacts 245 | **/*.Server/ModelManifest.xml 246 | _Pvt_Extensions 247 | 248 | # Paket dependency manager 249 | .paket/paket.exe 250 | paket-files/ 251 | 252 | # FAKE - F# Make 253 | .fake/ 254 | 255 | # JetBrains Rider 256 | .idea/ 257 | *.sln.iml 258 | 259 | # CodeRush 260 | .cr/ 261 | 262 | # Python Tools for Visual Studio (PTVS) 263 | __pycache__/ 264 | *.pyc -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/PipelineStatusChecker/Functions.cs: --------------------------------------------------------------------------------
using System;
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Microsoft.IdentityModel.Clients.ActiveDirectory;
using Microsoft.Rest;
using Microsoft.Azure.Management.DataFactory;
using Microsoft.Azure.Management.DataFactory.Models;
using System.Linq;
using System.Collections.Generic;
using Newtonsoft.Json.Linq;

namespace PipelineStatusChecker
{
    /// <summary>
    /// Helpers shared by the three HTTP-triggered status functions
    /// (previously this logic was triplicated across the classes).
    /// </summary>
    internal static class AdfHelper
    {
        /// <summary>
        /// Authenticates with the supplied service principal and returns a
        /// Data Factory management client scoped to the subscription.
        /// The token call is awaited rather than blocked on with .Result,
        /// which could deadlock/starve the functions host thread pool.
        /// </summary>
        internal static async Task<DataFactoryManagementClient> CreateClientAsync(
            string tenantId,
            string applicationId,
            string authenticationKey,
            string subscriptionId)
        {
            var context = new AuthenticationContext("https://login.windows.net/" + tenantId);
            var credential = new ClientCredential(applicationId, authenticationKey);
            AuthenticationResult result =
                await context.AcquireTokenAsync("https://management.azure.com/", credential);
            ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);

            return new DataFactoryManagementClient(cred)
            {
                SubscriptionId = subscriptionId
            };
        }

        /// <summary>
        /// Builds the JSON payload returned by every function. Using JObject
        /// instead of string concatenation keeps the payload valid even when a
        /// value contains quotes or other JSON special characters.
        /// </summary>
        internal static JObject BuildOutput(string pipelineName, string runId, string status)
        {
            return new JObject
            {
                ["PipelineName"] = pipelineName,
                ["RunIdUsed"] = runId,
                ["Status"] = status
            };
        }

        /// <summary>
        /// Builds the run query filter limiting results to a single pipeline
        /// name within the supplied time window.
        /// https://docs.microsoft.com/en-us/rest/api/datafactory/pipelineruns/querybyfactory#runqueryfilteroperand
        /// </summary>
        internal static RunFilterParameters BuildRunFilter(string pipelineName, DateTime after, DateTime before)
        {
            var filters = new List<RunQueryFilter>
            {
                new RunQueryFilter
                {
                    Operand = RunQueryFilterOperand.PipelineName,
                    OperatorProperty = RunQueryFilterOperator.Equals,
                    Values = new List<string> { pipelineName }
                }
            };

            return new RunFilterParameters(after, before, null, filters, null);
        }
    }

    public static class GetStatusByNameOnly
    {
        /// <summary>
        /// Gets the status of a data factory pipeline by name assuming the
        /// pipeline was executed within a recent time period (controlled by
        /// the DefaultDaysForPipelineRuns app setting).
        /// </summary>
        [FunctionName("GetStatusByNameOnly")]
        public static async Task<IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
            ILogger log)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            //Get query string values
            string tenantId = req.Query["tenantId"];
            string applicationId = req.Query["applicationId"];
            string authenticationKey = req.Query["authenticationKey"];
            string subscriptionId = req.Query["subscriptionId"];
            string resourceGroup = req.Query["resourceGroup"];
            string factoryName = req.Query["factoryName"];
            string pipelineName = req.Query["pipelineName"];

            int daysOfRuns = int.Parse(Environment.GetEnvironmentVariable("DefaultDaysForPipelineRuns"));

            //Fall back to request body values for anything not on the query string
            string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data = JsonConvert.DeserializeObject(requestBody);

            tenantId = tenantId ?? data?.tenantId;
            applicationId = applicationId ?? data?.applicationId;
            authenticationKey = authenticationKey ?? data?.authenticationKey;
            subscriptionId = subscriptionId ?? data?.subscriptionId;
            resourceGroup = resourceGroup ?? data?.resourceGroup;
            factoryName = factoryName ?? data?.factoryName;
            pipelineName = pipelineName ?? data?.pipelineName;

            //Check for required values. Note: resourceGroup was previously
            //missing from this validation even though it is used below.
            if (tenantId == null ||
                applicationId == null ||
                authenticationKey == null ||
                subscriptionId == null ||
                resourceGroup == null ||
                factoryName == null ||
                pipelineName == null)
            {
                return new BadRequestObjectResult("Invalid request body, value missing.");
            }

            //Create a data factory management client
            var client = await AdfHelper.CreateClientAsync(
                tenantId, applicationId, authenticationKey, subscriptionId);

            //Query data factory for recent runs of the named pipeline
            DateTime windowEnd = DateTime.Now;
            DateTime windowStart = windowEnd.AddDays(-daysOfRuns);
            var filterParams = AdfHelper.BuildRunFilter(pipelineName, windowStart, windowEnd);
            var requiredRuns = client.PipelineRuns.QueryByFactory(resourceGroup, factoryName, filterParams);

            //The last run returned within the window is treated as the latest
            //execution (same assumption as the original enumerator walk).
            PipelineRun latestRun = requiredRuns.Value.LastOrDefault(r => r.PipelineName == pipelineName);

            if (latestRun == null)
            {
                //Previously this case silently returned empty strings in an OK
                //response; surface it to the caller instead.
                return new BadRequestObjectResult(
                    $"No runs found for pipeline '{pipelineName}' in the last {daysOfRuns} day(s).");
            }

            //Get status for the latest run id
            string runId = latestRun.RunId;
            string pipelineStatus = client.PipelineRuns.Get(resourceGroup, factoryName, runId).Status;

            return new OkObjectResult(AdfHelper.BuildOutput(pipelineName, runId, pipelineStatus));
        }
    }

    public static class GetStatusByNameAndRunId
    {
        /// <summary>
        /// Gets the status of a data factory pipeline by name and execution run id.
        /// </summary>
        [FunctionName("GetStatusByNameAndRunId")]
        public static async Task<IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
            ILogger log)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            //Get query string values
            string tenantId = req.Query["tenantId"];
            string applicationId = req.Query["applicationId"];
            string authenticationKey = req.Query["authenticationKey"];
            string subscriptionId = req.Query["subscriptionId"];
            string resourceGroup = req.Query["resourceGroup"];
            string factoryName = req.Query["factoryName"];
            string pipelineName = req.Query["pipelineName"];
            string runId = req.Query["runId"];

            //Fall back to request body values for anything not on the query string
            string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data = JsonConvert.DeserializeObject(requestBody);

            tenantId = tenantId ?? data?.tenantId;
            applicationId = applicationId ?? data?.applicationId;
            authenticationKey = authenticationKey ?? data?.authenticationKey;
            subscriptionId = subscriptionId ?? data?.subscriptionId;
            resourceGroup = resourceGroup ?? data?.resourceGroup;
            factoryName = factoryName ?? data?.factoryName;
            pipelineName = pipelineName ?? data?.pipelineName;
            runId = runId ?? data?.runId;

            //Check for required values (resourceGroup added; it was missing before)
            if (tenantId == null ||
                applicationId == null ||
                authenticationKey == null ||
                subscriptionId == null ||
                resourceGroup == null ||
                factoryName == null ||
                pipelineName == null ||
                runId == null)
            {
                return new BadRequestObjectResult("Invalid request body, value missing.");
            }

            //Create a data factory management client
            var client = await AdfHelper.CreateClientAsync(
                tenantId, applicationId, authenticationKey, subscriptionId);

            //Get pipeline status for the provided run id
            PipelineRun pipelineRun = client.PipelineRuns.Get(resourceGroup, factoryName, runId);

            return new OkObjectResult(AdfHelper.BuildOutput(pipelineName, runId, pipelineRun.Status));
        }
    }

    public static class GetAndWaitForStatusByName
    {
        /// <summary>
        /// Gets the status of a data factory pipeline by name assuming the
        /// pipeline was executed within a recent time period.
        /// Waits until the pipeline completes, causing the function to block its caller.
        /// </summary>
        [FunctionName("GetAndWaitForStatusByName")]
        public static async Task<IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
            ILogger log)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            //Get query string values
            string tenantId = req.Query["tenantId"];
            string applicationId = req.Query["applicationId"];
            string authenticationKey = req.Query["authenticationKey"];
            string subscriptionId = req.Query["subscriptionId"];
            string resourceGroup = req.Query["resourceGroup"];
            string factoryName = req.Query["factoryName"];
            string pipelineName = req.Query["pipelineName"];

            int daysOfRuns = int.Parse(Environment.GetEnvironmentVariable("DefaultDaysForPipelineRuns"));

            //Fall back to request body values for anything not on the query string
            string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data = JsonConvert.DeserializeObject(requestBody);

            tenantId = tenantId ?? data?.tenantId;
            applicationId = applicationId ?? data?.applicationId;
            authenticationKey = authenticationKey ?? data?.authenticationKey;
            subscriptionId = subscriptionId ?? data?.subscriptionId;
            resourceGroup = resourceGroup ?? data?.resourceGroup;
            factoryName = factoryName ?? data?.factoryName;
            pipelineName = pipelineName ?? data?.pipelineName;

            //Check for required values (resourceGroup added; it was missing before)
            if (tenantId == null ||
                applicationId == null ||
                authenticationKey == null ||
                subscriptionId == null ||
                resourceGroup == null ||
                factoryName == null ||
                pipelineName == null)
            {
                return new BadRequestObjectResult("Invalid request body, value missing.");
            }

            //Create a data factory management client
            var client = await AdfHelper.CreateClientAsync(
                tenantId, applicationId, authenticationKey, subscriptionId);

            //Query data factory for recent runs of the named pipeline
            DateTime windowEnd = DateTime.Now;
            DateTime windowStart = windowEnd.AddDays(-daysOfRuns);
            var filterParams = AdfHelper.BuildRunFilter(pipelineName, windowStart, windowEnd);
            var requiredRuns = client.PipelineRuns.QueryByFactory(resourceGroup, factoryName, filterParams);

            //The last run returned within the window is treated as the latest execution
            PipelineRun latestRun = requiredRuns.Value.LastOrDefault(r => r.PipelineName == pipelineName);

            if (latestRun == null)
            {
                //Previously an empty run id was passed to PipelineRuns.Get and threw
                return new BadRequestObjectResult(
                    $"No runs found for pipeline '{pipelineName}' in the last {daysOfRuns} day(s).");
            }

            string runId = latestRun.RunId;

            //Poll until the run leaves the InProgress/Queued states
            PipelineRun pipelineRun;
            while (true)
            {
                pipelineRun = client.PipelineRuns.Get(resourceGroup, factoryName, runId);

                if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
                {
                    //Task.Delay rather than Thread.Sleep so the host thread is
                    //released while we wait between polls
                    await Task.Delay(TimeSpan.FromSeconds(15));
                }
                else
                {
                    break;
                }
            }

            //Get error details if the run did not succeed
            string errorDetails = String.Empty;
            if (pipelineRun.Status != "Succeeded")
            {
                var filterParamsForError = new RunFilterParameters(windowStart, windowEnd);
                ActivityRunsQueryResponse queryResponse =
                    client.ActivityRuns.QueryByPipelineRun(resourceGroup, factoryName, runId, filterParamsForError);

                //FirstOrDefault guards against an empty activity list (First() threw before)
                errorDetails = queryResponse.Value.FirstOrDefault()?.Error?.ToString() ?? "Unknown error.";
            }

            //Prepare output
            JObject json = AdfHelper.BuildOutput(pipelineName, runId, pipelineRun.Status);

            return pipelineRun.Status == "Succeeded"
                ? (ActionResult)new OkObjectResult(json)
                : new BadRequestObjectResult($"{errorDetails}");
        }
    }
}
-------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/PipelineStatusChecker/PipelineStatusChecker.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | netcoreapp2.2 4 | v2 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | PreserveNewest 15 | 16 | 17 | PreserveNewest 18 | Never 19 | 20 | 21 | -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/PipelineStatusChecker/Properties/PublishProfiles/PipelineStatusChecker - Zip Deploy.pubxml: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | ZipDeploy 6 | AzureWebSite 7 | Release 8 | Any CPU 9 | https://pipelinestatuschecker.azurewebsites.net 10 | False 11 | /subscriptions/77a3e40b-dd36-433c-90ca-a09f39724af3/resourcegroups/CommunityDemos/providers/Microsoft.Web/sites/PipelineStatusChecker 12 | $PipelineStatusChecker 13 | <_SavePWD>True 14 | https://pipelinestatuschecker.scm.azurewebsites.net/ 15 | 16 | -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/PipelineStatusChecker/PipelineStatusChecker/host.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0" 3 | } -------------------------------------------------------------------------------- /Get Any Azure Data Factory Pipeline Run Status with Azure Functions/Visual.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Any
Azure Data Factory Pipeline Run Status with Azure Functions/Visual.pptx -------------------------------------------------------------------------------- /Get Data Factory to Check Itself for a Running Pipeline via the Azure Management API/Pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Check If Pipeline Is Already Running", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Get Subscription", 7 | "type": "WebActivity", 8 | "dependsOn": [], 9 | "policy": { 10 | "timeout": "7.00:00:00", 11 | "retry": 0, 12 | "retryIntervalInSeconds": 30, 13 | "secureOutput": false, 14 | "secureInput": false 15 | }, 16 | "userProperties": [], 17 | "typeProperties": { 18 | "url": "https://management.azure.com/subscriptions?api-version=2020-01-01", 19 | "method": "GET", 20 | "authentication": { 21 | "type": "MSI", 22 | "resource": "https://management.core.windows.net/" 23 | } 24 | } 25 | }, 26 | { 27 | "name": "Get Pipeline Runs", 28 | "type": "WebActivity", 29 | "dependsOn": [ 30 | { 31 | "activity": "Set Parsed Subscription", 32 | "dependencyConditions": [ 33 | "Succeeded" 34 | ] 35 | }, 36 | { 37 | "activity": "Get Resource Group", 38 | "dependencyConditions": [ 39 | "Succeeded" 40 | ] 41 | }, 42 | { 43 | "activity": "Get Query Run Days Value", 44 | "dependencyConditions": [ 45 | "Succeeded" 46 | ] 47 | }, 48 | { 49 | "activity": "Check for Valid Pipeline Name", 50 | "dependencyConditions": [ 51 | "Succeeded" 52 | ] 53 | } 54 | ], 55 | "policy": { 56 | "timeout": "7.00:00:00", 57 | "retry": 0, 58 | "retryIntervalInSeconds": 30, 59 | "secureOutput": false, 60 | "secureInput": false 61 | }, 62 | "userProperties": [], 63 | "typeProperties": { 64 | "url": { 65 | "value": "https://management.azure.com/subscriptions/@{variables('SubscriptionId')}/resourceGroups/@{activity('Get Resource 
Group').output.firstRow.PropertyValue}/providers/Microsoft.DataFactory/factories/@{pipeline().DataFactory}/queryPipelineRuns?api-version=2018-06-01", 66 | "type": "Expression" 67 | }, 68 | "method": "POST", 69 | "body": { 70 | "value": "{\n \"lastUpdatedAfter\": \"@{adddays(utcnow(),int(activity('Get Query Run Days Value').output.firstRow.PropertyValue))}\",\n \"lastUpdatedBefore\": \"@{utcnow()}\",\n \"filters\": [\n {\n \"operand\": \"PipelineName\",\n \"operator\": \"Equals\",\n \"values\": [\n \"@{pipeline().parameters.PipelineName}\"\n ]\n }\n ]\n}", 71 | "type": "Expression" 72 | }, 73 | "authentication": { 74 | "type": "MSI", 75 | "resource": "https://management.core.windows.net/" 76 | } 77 | } 78 | }, 79 | { 80 | "name": "Set Parsed Subscription", 81 | "type": "SetVariable", 82 | "dependsOn": [ 83 | { 84 | "activity": "Get Subscription", 85 | "dependencyConditions": [ 86 | "Succeeded" 87 | ] 88 | } 89 | ], 90 | "userProperties": [], 91 | "typeProperties": { 92 | "variableName": "SubscriptionId", 93 | "value": { 94 | "value": "@replace(activity('Get Subscription').output.value[0].id,'/subscriptions/','')", 95 | "type": "Expression" 96 | } 97 | } 98 | }, 99 | { 100 | "name": "Filter Running Pipelines", 101 | "type": "Filter", 102 | "dependsOn": [ 103 | { 104 | "activity": "Get Pipeline Runs", 105 | "dependencyConditions": [ 106 | "Succeeded" 107 | ] 108 | } 109 | ], 110 | "userProperties": [], 111 | "typeProperties": { 112 | "items": { 113 | "value": "@activity('Get Pipeline Runs').output.value", 114 | "type": "Expression" 115 | }, 116 | "condition": { 117 | "value": "@and(not(equals(item().runId,pipeline().parameters.ThisRunId)),or(equals(item().status,'InProgress'),equals(item().status,'Queued')))", 118 | "type": "Expression" 119 | } 120 | } 121 | }, 122 | { 123 | "name": "Get Resource Group", 124 | "type": "Lookup", 125 | "dependsOn": [], 126 | "policy": { 127 | "timeout": "7.00:00:00", 128 | "retry": 0, 129 | "retryIntervalInSeconds": 30, 130 | 
"secureOutput": false, 131 | "secureInput": false 132 | }, 133 | "userProperties": [], 134 | "typeProperties": { 135 | "source": { 136 | "type": "AzureSqlSource", 137 | "sqlReaderStoredProcedureName": "[procfwk].[GetPropertyValue]", 138 | "storedProcedureParameters": { 139 | "PropertyName": { 140 | "type": "String", 141 | "value": "FrameworkFactoryResourceGroup" 142 | } 143 | }, 144 | "queryTimeout": "02:00:00", 145 | "partitionOption": "None" 146 | }, 147 | "dataset": { 148 | "referenceName": "GetSetMetadata", 149 | "type": "DatasetReference" 150 | } 151 | } 152 | }, 153 | { 154 | "name": "Get Query Run Days Value", 155 | "type": "Lookup", 156 | "dependsOn": [], 157 | "policy": { 158 | "timeout": "7.00:00:00", 159 | "retry": 0, 160 | "retryIntervalInSeconds": 30, 161 | "secureOutput": false, 162 | "secureInput": false 163 | }, 164 | "userProperties": [], 165 | "typeProperties": { 166 | "source": { 167 | "type": "AzureSqlSource", 168 | "sqlReaderStoredProcedureName": "[procfwk].[GetPropertyValue]", 169 | "storedProcedureParameters": { 170 | "PropertyName": { 171 | "type": "String", 172 | "value": "PreviousPipelineRunsQueryRange" 173 | } 174 | }, 175 | "queryTimeout": "02:00:00", 176 | "partitionOption": "None" 177 | }, 178 | "dataset": { 179 | "referenceName": "GetSetMetadata", 180 | "type": "DatasetReference" 181 | } 182 | } 183 | }, 184 | { 185 | "name": "If Pipeline Is Running", 186 | "type": "IfCondition", 187 | "dependsOn": [ 188 | { 189 | "activity": "Filter Running Pipelines", 190 | "dependencyConditions": [ 191 | "Succeeded" 192 | ] 193 | } 194 | ], 195 | "userProperties": [], 196 | "typeProperties": { 197 | "expression": { 198 | "value": "@greaterOrEquals(int(activity('Filter Running Pipelines').output.FilteredItemsCount),1)", 199 | "type": "Expression" 200 | }, 201 | "ifTrueActivities": [ 202 | { 203 | "name": "Raise Error", 204 | "type": "Lookup", 205 | "dependsOn": [], 206 | "policy": { 207 | "timeout": "7.00:00:00", 208 | "retry": 0, 209 | 
"retryIntervalInSeconds": 30, 210 | "secureOutput": false, 211 | "secureInput": false 212 | }, 213 | "userProperties": [], 214 | "typeProperties": { 215 | "source": { 216 | "type": "AzureSqlSource", 217 | "sqlReaderQuery": { 218 | "value": "RAISERROR('@{concat('Provided pipeline name (',pipeline().parameters.PipelineName,') still has a run in progress or queued given the query range parameters set in the properties table.')}',16,1);", 219 | "type": "Expression" 220 | }, 221 | "queryTimeout": "02:00:00", 222 | "partitionOption": "None" 223 | }, 224 | "dataset": { 225 | "referenceName": "GetSetMetadata", 226 | "type": "DatasetReference" 227 | }, 228 | "firstRowOnly": false 229 | } 230 | } 231 | ] 232 | } 233 | }, 234 | { 235 | "name": "Check for Valid Pipeline Name", 236 | "type": "WebActivity", 237 | "dependsOn": [ 238 | { 239 | "activity": "Set Parsed Subscription", 240 | "dependencyConditions": [ 241 | "Succeeded" 242 | ] 243 | }, 244 | { 245 | "activity": "Get Resource Group", 246 | "dependencyConditions": [ 247 | "Succeeded" 248 | ] 249 | } 250 | ], 251 | "policy": { 252 | "timeout": "7.00:00:00", 253 | "retry": 0, 254 | "retryIntervalInSeconds": 30, 255 | "secureOutput": false, 256 | "secureInput": false 257 | }, 258 | "userProperties": [], 259 | "typeProperties": { 260 | "url": { 261 | "value": "https://management.azure.com/subscriptions/@{variables('SubscriptionId')}/resourceGroups/@{activity('Get Resource Group').output.firstRow.PropertyValue}/providers/Microsoft.DataFactory/factories/@{pipeline().DataFactory}/pipelines/@{pipeline().parameters.PipelineName}?api-version=2018-06-01", 262 | "type": "Expression" 263 | }, 264 | "method": "GET", 265 | "authentication": { 266 | "type": "MSI", 267 | "resource": "https://management.core.windows.net/" 268 | } 269 | } 270 | } 271 | ], 272 | "parameters": { 273 | "PipelineName": { 274 | "type": "string" 275 | }, 276 | "ThisRunId": { 277 | "type": "string" 278 | } 279 | }, 280 | "variables": { 281 | "SubscriptionId": { 
282 | "type": "String" 283 | }, 284 | "RunCount": { 285 | "type": "String" 286 | } 287 | }, 288 | "folder": { 289 | "name": "_ProcFwk/_ProcFwkUtils" 290 | }, 291 | "annotations": [] 292 | } 293 | } -------------------------------------------------------------------------------- /Get Data Factory to Check Itself for a Running Pipeline via the Azure Management API/Social Media Image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Get Data Factory to Check Itself for a Running Pipeline via the Azure Management API/Social Media Image.png -------------------------------------------------------------------------------- /How To Use 'Specify dynamic contents in JSON format' in Azure Data Factory Linked Services/Dynamic Key Vault Linked Service.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "GenericKeys", 3 | "properties": { 4 | "parameters": { 5 | "baseUrl": { 6 | "type": "String" 7 | } 8 | }, 9 | "annotations": [], 10 | "type": "AzureKeyVault", 11 | "typeProperties": { 12 | "baseUrl": "@{linkedService().baseUrl}" 13 | } 14 | } 15 | } 16 | 17 | 18 | { 19 | "name": "trainingdb01", 20 | "type": "Microsoft.DataFactory/factories/linkedservices", 21 | "properties": { 22 | "annotations": [], 23 | "type": "AzureSqlDatabase", 24 | "typeProperties": { 25 | "connectionString": { 26 | "type": "AzureKeyVaultSecret", 27 | "store": { 28 | "referenceName": "@{linkedService().KeyVaultName}", 29 | "type": "LinkedServiceReference" 30 | }, 31 | "secretName": "ConnectionString-trainingdb01" 32 | } 33 | } 34 | } 35 | } -------------------------------------------------------------------------------- /Idea for Self Service Using Azure Synapse Analytics/Data Consumption Dashboards.pbix: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Idea for Self Service Using Azure Synapse Analytics/Data Consumption Dashboards.pbix -------------------------------------------------------------------------------- /Idea for Self Service Using Azure Synapse Analytics/Images.vsdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Idea for Self Service Using Azure Synapse Analytics/Images.vsdx -------------------------------------------------------------------------------- /Idea for Self Service Using Azure Synapse Analytics/Notebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "saveOutput": false, 4 | "language_info": { 5 | "name": "csharp" 6 | } 7 | }, 8 | "nbformat": 4, 9 | "nbformat_minor": 2, 10 | "cells": [ 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "outputs": [], 15 | "metadata": { 16 | "tags": [ 17 | "parameters" 18 | ] 19 | }, 20 | "source": [ 21 | "string inputFilePathFull; // = \"/Landing/Year=2020/Month=10/Day=20/Hour=10/Gender.csv\";" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "outputs": [], 28 | "metadata": {}, 29 | "source": [ 30 | "using System.IO;\n", 31 | "\n", 32 | "string container = \"abfss://dataplatform@swimmingarea01.dfs.core.windows.net\";\n", 33 | "string dataSetName = Path.GetFileNameWithoutExtension(inputFilePathFull).Replace(\" \",\"\");\n", 34 | "string outputFilePathFull = \"/Consume/\" + dataSetName + \".parquet\";" 35 | ], 36 | "attachments": {} 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 3, 41 | "outputs": [], 42 | "metadata": {}, 43 | "source": [ 44 | "//read csv and infer schema\n", 45 | "var dfOptions = new Dictionary();\n", 46 | "\n", 47 | "dfOptions.Add(\"header\", \"true\");\n", 48 
| "dfOptions.Add(\"inferSchema\", \"true\");\n", 49 | "\n", 50 | "DataFrame rawCsv = spark.Read().Options(dfOptions).Csv(container + inputFilePathFull);" 51 | ], 52 | "attachments": {} 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 4, 57 | "outputs": [], 58 | "metadata": {}, 59 | "source": [ 60 | "//write and use as parquet\n", 61 | "rawCsv.Write().Mode(SaveMode.Overwrite).Parquet(container + outputFilePathFull);\n", 62 | "DataFrame consumptionFile = spark.Read().Parquet(container + outputFilePathFull);" 63 | ], 64 | "attachments": {} 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 5, 69 | "outputs": [], 70 | "metadata": {}, 71 | "source": [ 72 | "//clean up if needed\n", 73 | "spark.Sql(\"DROP TABLE IF EXISTS \" + dataSetName);" 74 | ], 75 | "attachments": {} 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 6, 80 | "outputs": [], 81 | "metadata": {}, 82 | "source": [ 83 | "//create table for consumption\n", 84 | "consumptionFile.Write().SaveAsTable(dataSetName);" 85 | ], 86 | "attachments": {} 87 | } 88 | ] 89 | } -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Cluster Admin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Cluster Admin.png -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Data Studio - Master Instance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Data Studio - Master Instance.png 
-------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Data Studio - Notebooks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Data Studio - Notebooks.png -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Grafana Node Dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Grafana Node Dashboard.png -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Grafana SQL Dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Grafana SQL Dashboard.png -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Icon.png -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Kubernetes Pods.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Kubernetes Pods.png -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/SSMS.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/SSMS.png -------------------------------------------------------------------------------- /Interacting with SQL Server 2019 Big Data Clusters/Spark Server Logs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Interacting with SQL Server 2019 Big Data Clusters/Spark Server Logs.png -------------------------------------------------------------------------------- /PowerShell Export Databricks Workspace Items - Recurse/Export Databricks Workspace Items Recurse.ps1: --------------------------------------------------------------------------------
#As required, first use:
#Install-Module -Name DatabricksPS -Scope CurrentUser
#Import-Module -Name DatabricksPS

#Local variables
# SECURITY: never commit a real access token to source control. The token that
# was previously hard-coded here must be treated as compromised and revoked.
$Token = $env:DATABRICKS_TOKEN                   ## << SET VIA ENVIRONMENT VARIABLE
$Url = "https://northeurope.azuredatabricks.net" ## << SET YOUR SERVICE URL
$OutputPath = "C:\Users\pja\Desktop\Test"        ## << SET A LOCAL OUTPUT PATH
$WorkspacePath = "/"                             ## << SET THE WORKSPACE PATH. '/' FOR ROOT OR A LOWER LEVEL IF NEEDED

#Boiler plate code

# Returns one PSObject (Path/Language/Type) per notebook or library found
# anywhere under the path supplied in $Params.
function Get-AllDatabricksItems ([hashtable] $Params) {
    recurseDatabricksItem -Params $Params
}

# Walks the workspace tree depth-first, emitting matching items to the pipeline.
function recurseDatabricksItem ([hashtable] $Params) {

    $ChildItems = Get-DatabricksWorkspaceItem @Params

    foreach ($ChildItem in $ChildItems) {

        switch ($ChildItem.object_type) {
            { $_ -eq "NOTEBOOK" -or $_ -eq "LIBRARY" } {
                New-Object PSObject -Property @{
                    Path     = $ChildItem.path
                    Language = $ChildItem.language
                    Type     = $ChildItem.object_type
                }
            }
            "DIRECTORY" {
                # Recurse with a fresh hashtable so the caller's parameters are
                # never mutated (the original Remove/Add approach re-used one
                # shared hashtable across every recursion level).
                recurseDatabricksItem -Params @{ Path = $ChildItem.path }
            }
        }
    }
}

# Creates the parent directory of $path if it does not already exist.
function lazyMKDir ([string] $path) {
    $Dir = Split-Path $path -Parent
    if (!(Test-Path -Path $Dir)) {
        New-Item -ItemType directory -Path $Dir | Out-Null
    }
}

Write-Host "Setting Databricks environment."

Set-DatabricksEnvironment -AccessToken "$Token" -ApiRootUrl "$Url" | Out-Null

Write-Host "Getting list of Workspace items."

#Get items
$DBItems = Get-AllDatabricksItems -Params @{ 'Path' = $WorkspacePath }

Write-Host "Exporting Workspace items."

# Map each notebook language to the file extension and export format used below.
$FormatByLanguage = @{
    "SCALA"  = @{ Extension = ".scala"; Format = "SOURCE"  }
    "PYTHON" = @{ Extension = ".ipynb"; Format = "JUPYTER" }
    "SQL"    = @{ Extension = ".sql";   Format = "SOURCE"  }
    "R"      = @{ Extension = ".r";     Format = "SOURCE"  }
}

#Export items
ForEach ($DBItem in $DBItems)
{
    if ($DBItem.Type -eq "LIBRARY") {
        $Info = "Export of libraries is not currently supported by the Databricks Workspace API. See Libraries API for more information https://docs.databricks.com/api/latest/libraries.html. Could not export: " + $DBItem.Path
        Write-Warning $Info
        continue   # previously execution fell through and still hit the export switch
    }

    Write-Host "Exporting:" $DBItem.Path

    $Mapping = $FormatByLanguage[[string]$DBItem.Language]
    if ($null -eq $Mapping) {
        continue   # unknown language - nothing to export (matches original no-op)
    }

    $FullOutputPath = $OutputPath + $DBItem.Path + $Mapping.Extension
    lazyMKDir -path $FullOutputPath

    Export-DatabricksWorkspaceItem `
        -Path $DBItem.Path `
        -LocalPath $FullOutputPath `
        -Format $Mapping.Format
}

Write-Host "Export complete."
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Blog Supporting Content 2 | 3 | All sorts of things supporting various blog posts... Sub folders per blog post title.
4 | 5 | In blog post date order, with the latest post first: 6 | 7 | - [Creating a Simple Staged Metadata Driven Processing Framework for Azure Data Factory Pipelines](https://mrpaulandrew.com/2020/02/25/creating-a-simple-staged-metadata-driven-processing-framework-for-azure-data-factory-pipelines-part-1-of-4/) 8 | 9 | - [Execute Any Azure Data Factory Pipeline with an Azure Function](https://mrpaulandrew.com/2020/02/18/execute-any-azure-data-factory-pipeline-with-an-azure-function/) 10 | 11 | - [Summarise my Azure Data Factory ARM Template Using T-SQL](https://mrpaulandrew.com/2019/12/19/summarise-my-azure-data-factory-arm-template-using-t-sql/) 12 | 13 | - [Structuring Your Databricks Notebooks with Markdown, Titles, Widgets and Comments](https://mrpaulandrew.com/2019/11/28/structuring-your-databricks-notebooks-with-markdown-titles-widgets-and-comments/) 14 | 15 | - [Get Any Azure Data Factory Pipeline Run Status with Azure Functions](https://mrpaulandrew.com/2019/11/21/get-any-azure-data-factory-pipeline-run-status-with-azure-functions/) 16 | 17 | - [Azure Data Factory - Pipeline Hierarchies (Generation Control)](https://mrpaulandrew.com/2019/09/25/azure-data-factory-pipeline-hierarchies-generation-control/) 18 | 19 | - [Interacting with SQL Server 2019 Big Data Clusters](https://mrpaulandrew.com/2018/11/16/interacting-with-sql-server-2019-big-data-clusters/) 20 | 21 | - [Using Data Factory Parameterised Linked Services](https://mrpaulandrew.com/2018/11/15/using-data-factory-parameterised-linked-services/) 22 | 23 | - [Creating an Azure Data Factory v2 Custom Activity](https://mrpaulandrew.com/2018/11/12/creating-an-azure-data-factory-v2-custom-activity/) 24 | -------------------------------------------------------------------------------- /Scaling Azure Data Integration Pipelines With Regional Data Extraction And Central Processing/Data Pipelines Scaled.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Scaling Azure Data Integration Pipelines With Regional Data Extraction And Central Processing/Data Pipelines Scaled.pdf -------------------------------------------------------------------------------- /Scaling Azure Data Integration Pipelines With Regional Data Extraction And Central Processing/Data Pipelines Scaled.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Scaling Azure Data Integration Pipelines With Regional Data Extraction And Central Processing/Data Pipelines Scaled.png -------------------------------------------------------------------------------- /Structuring Your Databricks Notebooks with Markdown, Titles, Widgets and Comments/Databricks Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Structuring Your Databricks Notebooks with Markdown, Titles, Widgets and Comments/Databricks Icon.png -------------------------------------------------------------------------------- /Structuring Your Databricks Notebooks with Markdown, Titles, Widgets and Comments/Notebook Example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Structuring Your Databricks Notebooks with Markdown, Titles, Widgets and Comments/Notebook Example.png -------------------------------------------------------------------------------- /Structuring Your Databricks Notebooks with Markdown, Titles, Widgets and Comments/Notebook Example.scala: -------------------------------------------------------------------------------- 1 | // Databricks notebook 
// Databricks notebook source
// MAGIC %md
// MAGIC # Notebook Structure Example (Title)
// MAGIC
// MAGIC ## Overview
// MAGIC
// MAGIC | Detail Tag | Information |
// MAGIC |------------|-------------|
// MAGIC |Originally Created By | Paul Andrew ([paul@mrpaulandrew.com](mailto:paul@mrpaulandrew.com)) |
// MAGIC |External References |[https://mrpaulandrew.tech](https://mrpaulandrew.tech) |
// MAGIC |Input Datasets |dbo.SalesOrderHeaders <br> dbo.SalesOrderDetails |
// MAGIC |Output Datasets |fact.SalesOrderCount |
// MAGIC |Input Data Source |Azure SQLDB |
// MAGIC |Output Data Source |Azure SQLDB |
// MAGIC
// MAGIC ## History
// MAGIC
// MAGIC | Date | Developed By | Reason |
// MAGIC |:----:|--------------|--------|
// MAGIC |27th Nov 2019 | Paul Andrew |Notebook created as an example of how they could be structured. |
// MAGIC |28th Nov 2019 | Paul Andrew |Notebook updated with additional cells. |
// MAGIC |29th Nov 2019 | Paul Andrew |Notebook updated for blog post. |
// MAGIC
// MAGIC ## Other Details
// MAGIC This Notebook contains many cells with lots of titles and markdown to give details and context for future developers.

// COMMAND ----------

// DBTITLE 1,Load Common Libraries
// MAGIC %run "../Framework/MrPaulAndrew.Common"

// COMMAND ----------

// MAGIC %run "../Framework/MrPaulAndrew.StorageConnections"

// COMMAND ----------

// DBTITLE 1,Set & Get Widgets
// Creates the RunDate text widget (empty default); its value drives the Extract filter below.
dbutils.widgets.text("RunDate","")

// COMMAND ----------

// DBTITLE 1,Log Start
// createLogEntry is provided by the %run framework notebooks above - TODO confirm.
createLogEntry("Example Notebook Structure Start.")

// COMMAND ----------

// DBTITLE 1,Local Methods, Properties & Variables
val outputTableName = "OrderLineCountScala"

import java.sql.Timestamp
import java.text.SimpleDateFormat
import java.util.Date

// Parses the widget value (expected yyyy-MM-dd) into a java.sql.Timestamp.
// Returns null for an empty string, so an unset widget matches no OrderDate downstream.
// Throws java.text.ParseException for a non-empty value in another format.
def getTimestamp(x:Any) : Timestamp = {
  val format = new SimpleDateFormat("yyyy-MM-dd") //expected format of widget
  if (x.toString() == "")
    return null
  else {
    val d = format.parse(x.toString());
    val t = new Timestamp(d.getTime());
    return t
  }
}

// COMMAND ----------

// DBTITLE 1,Extract
// jdbcUrl and connectionProperties presumably come from the %run framework
// notebooks above - NOTE(review): confirm against MrPaulAndrew.StorageConnections.
val orderHeaderTable = spark.read.jdbc(jdbcUrl, "SalesLT.SalesOrderHeader", connectionProperties)
val orderDetailTable = spark.read.jdbc(jdbcUrl, "SalesLT.SalesOrderDetail", connectionProperties)

//just for testing with SQL
spark.read.jdbc(jdbcUrl, "SalesLT.SalesOrderHeader", connectionProperties).createOrReplaceTempView("temp_salesOrderHeader")
spark.read.jdbc(jdbcUrl, "SalesLT.SalesOrderDetail", connectionProperties).createOrReplaceTempView("temp_salesOrderDetail")

// COMMAND ----------

// DBTITLE 1,Transform
// Counts detail lines per sales order, restricted to orders whose OrderDate
// equals the RunDate widget value (null widget -> no matching rows).
val ordersCount = orderHeaderTable
  .filter(orderHeaderTable("OrderDate").equalTo(getTimestamp(dbutils.widgets.get("RunDate"))))
  .join(orderDetailTable, orderHeaderTable("SalesOrderID") === orderDetailTable("SalesOrderID"), "inner")
  .groupBy("SalesOrderNumber")
  .count()
  .withColumnRenamed("count", "DetailLineCount")
  //.show()


// COMMAND ----------

// DBTITLE 1,Test
// MAGIC %sql
// MAGIC
// MAGIC select
// MAGIC 	oh.SalesOrderNumber,
// MAGIC 	count(od.SalesOrderDetailID) as DetailLineCount
// MAGIC from
// MAGIC 	temp_salesOrderHeader as oh
// MAGIC 	join temp_salesOrderDetail as od
// MAGIC 		on od.SalesOrderID = oh.SalesOrderID
// MAGIC where
// MAGIC 	oh.OrderDate = getArgument("RunDate") --implicit conversion from widget string
// MAGIC group by
// MAGIC 	oh.SalesOrderNumber
// MAGIC order by
// MAGIC 	oh.SalesOrderNumber

// COMMAND ----------

// DBTITLE 1,Load/Output
// Writes the aggregate back to Azure SQLDB, replacing the target table contents.
ordersCount.write
  .mode(SaveMode.Overwrite) //don't need to keep previous data
  .jdbc(jdbcUrl, outputTableName, connectionProperties)

// COMMAND ----------

// DBTITLE 1,Log End
createLogEntry("Example Notebook Structure End.")
--------------------------------------------------------------------------------
/Summarise my Azure Data Factory ARM Template Using T-SQL/Create Table.sql:
--------------------------------------------------------------------------------

CREATE TABLE [dbo].[ArmTemplates]
(
	[ARMTemplate] NVARCHAR(MAX) NULL
)
GO


--------------------------------------------------------------------------------
/Summarise my Azure Data Factory ARM Template Using T-SQL/Parse JSON.sql:
--------------------------------------------------------------------------------
/*
	Summarise an Azure Data Factory ARM template.
	Prerequisite: the exported ARM template JSON has been loaded into
	[dbo].[ArmTemplates] (see Create Table.sql in this folder).
	Each statement below shreds the template with OPENJSON + APPLY to report on
	one type of Data Factory component. Requires OPENJSON support
	(SQL Server 2016+ / Azure SQL Database, database compatibility level 130+).
*/
DECLARE @ARMJson NVARCHAR(MAX)
SELECT @ARMJson = [ARMTemplate] from [dbo].[ArmTemplates]


/* ------------------------------------------------------------
data factory name
------------------------------------------------------------ */
SELECT
	FactoryName.[defaultValue] AS DataFactoryName
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[parameters] NVARCHAR(MAX) AS JSON
	) AS Params
	CROSS APPLY OPENJSON (Params.[parameters]) WITH
	(
		[factoryName] NVARCHAR(MAX) AS JSON
	) AS FactoryDetails
	CROSS APPLY OPENJSON (FactoryDetails.[factoryName]) WITH
	(
		[type] NVARCHAR(128),
		[metadata] NVARCHAR(128),
		[defaultValue] NVARCHAR(128)
	) AS FactoryName

/* ------------------------------------------------------------
component summary
------------------------------------------------------------ */
--UPPER(LEFT(...)) + RIGHT(...) only upper-cases the first letter for
--presentation, e.g. 'pipelines' -> 'Pipelines'.
SELECT
	UPPER(LEFT(REPLACE(ResourceDetails.[type],'Microsoft.DataFactory/factories/',''),1)) +
	RIGHT(REPLACE(ResourceDetails.[type],'Microsoft.DataFactory/factories/',''),
	LEN(REPLACE(ResourceDetails.[type],'Microsoft.DataFactory/factories/',''))-1) AS 'ComponentType',
	COUNT(*) AS 'Count'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails
GROUP BY
	ResourceDetails.[type]

UNION SELECT
	'Activities',
	COUNT(ActivityDetails.[name]) AS 'Count'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--pipeline details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[activities] NVARCHAR(MAX) AS JSON,
		[description] NVARCHAR(MAX)
	) AS Properties

	--activity details for count
	CROSS APPLY OPENJSON (Properties.[activities]) WITH
	(
		[name] NVARCHAR(MAX)
	) AS ActivityDetails
WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/pipelines'


/* ------------------------------------------------------------
pipeline information
------------------------------------------------------------ */
--NOTE(review): SUBSTRING(..., 50) truncates component names longer than 50
--characters after the '/'; confirm this is acceptable for your factory.
--NOTE(review): the CROSS APPLY over [activities] drops pipelines with no
--activities entirely; switch it to OUTER APPLY if they should show with
--ActivityCount 0 - confirm desired behaviour first.
SELECT
	REPLACE(SUBSTRING(ResourceDetails.[name], CHARINDEX('/',ResourceDetails.[name])+1, 50),''')]','') AS 'PipelineName',
	Properties.[description] AS 'Description',
	Folder.[name] AS 'FolderName',
	COUNT(ActivityDetails.[name]) AS 'ActivityCount'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--pipeline details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[activities] NVARCHAR(MAX) AS JSON,
		[description] NVARCHAR(MAX),
		[folder] NVARCHAR(MAX) AS JSON
	) AS Properties

	--folder details
	--OUTER APPLY (was CROSS APPLY): [folder] is absent for pipelines not placed
	--in a folder and OPENJSON over that NULL returns no rows, so CROSS APPLY
	--silently excluded those pipelines. OUTER APPLY keeps them, FolderName NULL.
	OUTER APPLY OPENJSON (Properties.[folder]) WITH
	(
		[name] NVARCHAR(500)
	) AS Folder

	--activity details for count
	CROSS APPLY OPENJSON (Properties.[activities]) WITH
	(
		[name] NVARCHAR(MAX)
	) AS ActivityDetails
WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/pipelines'
GROUP BY
	ResourceDetails.[name],
	Properties.[description],
	Folder.[name]


/* ------------------------------------------------------------
activity information
------------------------------------------------------------ */
SELECT
	ActivityDetails.[name] AS 'ActivityName',
	ActivityDetails.[type] AS 'Type',
	ActivityDetails.[description] AS 'Description',
	REPLACE(SUBSTRING(ResourceDetails.[name], CHARINDEX('/',ResourceDetails.[name])+1, 50),''')]','') AS 'BelongsToPipeline'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--pipeline details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[activities] NVARCHAR(MAX) AS JSON,
		[description] NVARCHAR(MAX)
	) AS Properties

	--activity details
	CROSS APPLY OPENJSON (Properties.[activities]) WITH
	(
		[name] NVARCHAR(MAX),
		[description] NVARCHAR(MAX),
		[type] NVARCHAR(500)
	) AS ActivityDetails
WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/pipelines'


/* ------------------------------------------------------------
linked service information
------------------------------------------------------------ */
SELECT
	REPLACE(SUBSTRING(ResourceDetails.[name], CHARINDEX('/',ResourceDetails.[name])+1, 50),''')]','') AS 'LinkedServiceName',
	Properties.[type] AS 'Type',
	--simple text search of the raw properties JSON for any Key Vault secret reference
	CASE
		WHEN ResourceDetails.[properties] LIKE '%AzureKeyVaultSecret%' THEN 'Yes'
		ELSE 'No'
	END AS 'UsingKeyVault'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--linked service details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[type] NVARCHAR(MAX)
	) AS Properties

WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/linkedServices'



/* ------------------------------------------------------------
dataset information
------------------------------------------------------------ */
SELECT
	REPLACE(SUBSTRING(ResourceDetails.[name], CHARINDEX('/',ResourceDetails.[name])+1, 50),''')]','') AS 'DatasetName',
	Properties.[type] AS 'Type',
	Folder.[name] AS 'FolderName',
	RelatedLinkedService.[referenceName] AS 'ConnectedToLinkedService'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--dataset details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[linkedServiceName] NVARCHAR(MAX) AS JSON,
		[type] NVARCHAR(MAX),
		[folder] NVARCHAR(MAX) AS JSON
	) AS Properties

	--folder details
	--OUTER APPLY (was CROSS APPLY): keeps datasets that are not in a folder,
	--which CROSS APPLY silently excluded (FolderName is NULL for them).
	OUTER APPLY OPENJSON (Properties.[folder]) WITH
	(
		[name] NVARCHAR(500)
	) AS Folder

	--linked service connection
	CROSS APPLY OPENJSON (Properties.[linkedServiceName]) WITH
	(
		[referenceName] NVARCHAR(500)
	) AS RelatedLinkedService
WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/datasets'


/* ------------------------------------------------------------
integration runtime information
------------------------------------------------------------ */
SELECT
	REPLACE(SUBSTRING(ResourceDetails.[name], CHARINDEX('/',ResourceDetails.[name])+1, 50),''')]','') AS 'IntegrationRuntimeName',
	Properties.[type] AS 'Type'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--ir details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[type] NVARCHAR(500)
	) AS Properties
WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/integrationRuntimes'


/* ------------------------------------------------------------
dataflow information
------------------------------------------------------------ */
SELECT
	REPLACE(SUBSTRING(ResourceDetails.[name], CHARINDEX('/',ResourceDetails.[name])+1, 50),''')]','') AS 'DataFlowName',
	Properties.[type] AS 'Type'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--df details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[type] NVARCHAR(500)
	) AS Properties
WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/dataflows'


/* ------------------------------------------------------------
trigger information
------------------------------------------------------------ */
SELECT
	REPLACE(SUBSTRING(ResourceDetails.[name], CHARINDEX('/',ResourceDetails.[name])+1, 50),''')]','') AS 'TriggerName',
	Properties.[type] AS 'Type',
	Properties.[runtimeState] AS 'Status'
FROM
	--top level template
	OPENJSON(@ARMJson) WITH
	(
		[resources] NVARCHAR(MAX) AS JSON
	) AS ResourceArray

	--resource details
	CROSS APPLY OPENJSON (ResourceArray.[resources]) WITH
	(
		[name] NVARCHAR(MAX),
		[type] NVARCHAR(500),
		[apiVersion] DATE,
		[properties] NVARCHAR(MAX) AS JSON
	) AS ResourceDetails

	--trigger details
	CROSS APPLY OPENJSON (ResourceDetails.[properties]) WITH
	(
		[runtimeState] NVARCHAR(500),
		[type] NVARCHAR(500)
	) AS Properties
WHERE
	ResourceDetails.[type] = 'Microsoft.DataFactory/factories/triggers'
--------------------------------------------------------------------------------
/The Microsoft Inteligent Data Platform/Feature Picture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/The Microsoft Inteligent Data Platform/Feature Picture.png
--------------------------------------------------------------------------------
/The Microsoft Inteligent Data Platform/Icon Timeline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/The Microsoft Inteligent Data Platform/Icon Timeline.png -------------------------------------------------------------------------------- /The Microsoft Inteligent Data Platform/Learning Synapse ER Feature Image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/The Microsoft Inteligent Data Platform/Learning Synapse ER Feature Image.png -------------------------------------------------------------------------------- /The Microsoft Inteligent Data Platform/Time Line.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/The Microsoft Inteligent Data Platform/Time Line.pptx -------------------------------------------------------------------------------- /Thinking About an Azure Synapse Analytics Physical Architecture/Azure Synapse Physical Architecture v1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Thinking About an Azure Synapse Analytics Physical Architecture/Azure Synapse Physical Architecture v1.jpg -------------------------------------------------------------------------------- /Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse ARM Template Bug.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse ARM Template Bug.png -------------------------------------------------------------------------------- 
/Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse Artifacts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse Artifacts.png -------------------------------------------------------------------------------- /Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse Git Config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse Git Config.png -------------------------------------------------------------------------------- /Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse Release.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Trying to Deploy Azure Synapse Analytics Using ARM Templates/Synapse Release.png -------------------------------------------------------------------------------- /Trying to Deploy Azure Synapse Analytics Using ARM Templates/Vanilla Workspace Template.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.1", 4 | "parameters": { 5 | "name": { 6 | "type": "String" 7 | }, 8 | "location": { 9 | "type": "String" 10 | }, 11 | "defaultDataLakeStorageAccountName": { 12 | "type": "String" 13 | }, 14 | "defaultDataLakeStorageFilesystemName": { 15 | "type": "String" 16 | }, 17 | "sqlAdministratorLogin": { 18 | "type": "String" 19 | }, 20 | "sqlAdministratorLoginPassword": { 21 | 
"defaultValue": "", 22 | "type": "SecureString" 23 | }, 24 | "setWorkspaceIdentityRbacOnStorageAccount": { 25 | "type": "Bool" 26 | }, 27 | "allowAllConnections": { 28 | "defaultValue": true, 29 | "type": "Bool" 30 | }, 31 | "grantWorkspaceIdentityControlForSql": { 32 | "allowedValues": [ 33 | "Enabled", 34 | "Disabled" 35 | ], 36 | "type": "String" 37 | }, 38 | "managedVirtualNetwork": { 39 | "allowedValues": [ 40 | "default", 41 | "" 42 | ], 43 | "type": "String" 44 | }, 45 | "tagValues": { 46 | "defaultValue": {}, 47 | "type": "Object" 48 | }, 49 | "storageSubscriptionID": { 50 | "defaultValue": "[subscription().subscriptionId]", 51 | "type": "String" 52 | }, 53 | "storageResourceGroupName": { 54 | "defaultValue": "[resourceGroup().name]", 55 | "type": "String" 56 | }, 57 | "storageLocation": { 58 | "defaultValue": "[resourceGroup().location]", 59 | "type": "String" 60 | }, 61 | "storageRoleUniqueId": { 62 | "defaultValue": "[newGuid()]", 63 | "type": "String" 64 | }, 65 | "isNewStorageAccount": { 66 | "defaultValue": false, 67 | "type": "Bool" 68 | }, 69 | "isNewFileSystemOnly": { 70 | "defaultValue": false, 71 | "type": "Bool" 72 | }, 73 | "adlaResourceId": { 74 | "defaultValue": "", 75 | "type": "String" 76 | }, 77 | "storageAccessTier": { 78 | "type": "String" 79 | }, 80 | "storageAccountType": { 81 | "type": "String" 82 | }, 83 | "storageSupportsHttpsTrafficOnly": { 84 | "type": "Bool" 85 | }, 86 | "storageKind": { 87 | "type": "String" 88 | }, 89 | "storageIsHnsEnabled": { 90 | "type": "Bool" 91 | }, 92 | "userObjectId": { 93 | "defaultValue": "", 94 | "type": "String" 95 | }, 96 | "setSbdcRbacOnStorageAccount": { 97 | "defaultValue": false, 98 | "type": "Bool" 99 | } 100 | }, 101 | "variables": { 102 | "storageBlobDataContributorRoleID": "ba92f5b4-2d11-453d-a403-e96b0029c9fe", 103 | "defaultDataLakeStorageAccountUrl": "[concat('https://', parameters('defaultDataLakeStorageAccountName'), '.dfs.core.windows.net')]" 104 | }, 105 | "resources": [ 106 | { 107 
| "type": "Microsoft.Synapse/workspaces", 108 | "apiVersion": "2019-06-01-preview", 109 | "name": "[parameters('name')]", 110 | "location": "[parameters('location')]", 111 | "dependsOn": [ 112 | "[concat('Microsoft.Storage/storageAccounts/', parameters('defaultDataLakeStorageAccountName'))]", 113 | "[concat('Microsoft.Resources/deployments/', parameters('defaultDataLakeStorageFilesystemName'))]" 114 | ], 115 | "tags": "[parameters('tagValues')]", 116 | "identity": { 117 | "type": "SystemAssigned" 118 | }, 119 | "properties": { 120 | "defaultDataLakeStorage": { 121 | "accountUrl": "[variables('defaultDataLakeStorageAccountUrl')]", 122 | "filesystem": "[parameters('defaultDataLakeStorageFilesystemName')]" 123 | }, 124 | "sqlAdministratorLogin": "[parameters('sqlAdministratorLogin')]", 125 | "sqlAdministratorLoginPassword": "[parameters('sqlAdministratorLoginPassword')]", 126 | "adlaResourceId": "[parameters('adlaResourceId')]", 127 | "managedVirtualNetwork": "[parameters('managedVirtualNetwork')]" 128 | }, 129 | "resources": [ 130 | { 131 | "type": "firewallrules", 132 | "apiVersion": "2019-06-01-preview", 133 | "name": "allowAll", 134 | "location": "[parameters('location')]", 135 | "dependsOn": [ 136 | "[concat('Microsoft.Synapse/workspaces/', parameters('name'))]" 137 | ], 138 | "properties": { 139 | "startIpAddress": "0.0.0.0", 140 | "endIpAddress": "255.255.255.255" 141 | }, 142 | "condition": "[parameters('allowAllConnections')]" 143 | }, 144 | { 145 | "type": "managedIdentitySqlControlSettings", 146 | "apiVersion": "2019-06-01-preview", 147 | "name": "default", 148 | "location": "[parameters('location')]", 149 | "dependsOn": [ 150 | "[concat('Microsoft.Synapse/workspaces/', parameters('name'))]" 151 | ], 152 | "properties": { 153 | "grantSqlControlToManagedIdentity": { 154 | "desiredState": "[parameters('grantWorkspaceIdentityControlForSql')]" 155 | } 156 | } 157 | } 158 | ] 159 | }, 160 | { 161 | "type": "Microsoft.Resources/deployments", 162 | "apiVersion": 
"2019-05-01", 163 | "name": "storageRoleDeploymentResource", 164 | "dependsOn": [ 165 | "[concat('Microsoft.Synapse/workspaces/', parameters('name'))]" 166 | ], 167 | "properties": { 168 | "mode": "Incremental", 169 | "template": { 170 | "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", 171 | "contentVersion": "1.0.0.0", 172 | "parameters": {}, 173 | "variables": {}, 174 | "resources": [ 175 | { 176 | "type": "Microsoft.Storage/storageAccounts/providers/roleAssignments", 177 | "apiVersion": "2018-09-01-preview", 178 | "name": "[concat(parameters('defaultDataLakeStorageAccountName'), '/Microsoft.Authorization/', guid(concat(resourceGroup().id, '/', variables('storageBlobDataContributorRoleID'), '/', parameters('name'), '/', parameters('storageRoleUniqueId'))))]", 179 | "location": "[parameters('storageLocation')]", 180 | "properties": { 181 | "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('storageBlobDataContributorRoleID'))]", 182 | "principalId": "[reference(concat('Microsoft.Synapse/workspaces/', parameters('name')), '2019-06-01-preview', 'Full').identity.principalId]", 183 | "principalType": "ServicePrincipal" 184 | } 185 | }, 186 | { 187 | "condition": "[parameters('setSbdcRbacOnStorageAccount')]", 188 | "type": "Microsoft.Storage/storageAccounts/providers/roleAssignments", 189 | "apiVersion": "2018-09-01-preview", 190 | "name": "[concat(parameters('defaultDataLakeStorageAccountName'), '/Microsoft.Authorization/', guid(concat(resourceGroup().id, '/', variables('storageBlobDataContributorRoleID'), '/', parameters('userObjectId'), '/', parameters('storageRoleUniqueId'))))]", 191 | "properties": { 192 | "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('storageBlobDataContributorRoleID'))]", 193 | "principalId": "[parameters('userObjectId')]", 194 | "principalType": "User" 195 | } 196 | } 197 | ] 198 | } 199 | }, 200 | "subscriptionId": 
"[parameters('storageSubscriptionID')]", 201 | "resourceGroup": "[parameters('storageResourceGroupName')]", 202 | "condition": "[parameters('setWorkspaceIdentityRbacOnStorageAccount')]" 203 | }, 204 | { 205 | "type": "Microsoft.Storage/storageAccounts", 206 | "apiVersion": "2018-02-01", 207 | "name": "[parameters('defaultDataLakeStorageAccountName')]", 208 | "location": "[parameters('storageLocation')]", 209 | "tags": {}, 210 | "sku": { 211 | "name": "[parameters('storageAccountType')]" 212 | }, 213 | "kind": "[parameters('storageKind')]", 214 | "properties": { 215 | "accessTier": "[parameters('storageAccessTier')]", 216 | "supportsHttpsTrafficOnly": "[parameters('storageSupportsHttpsTrafficOnly')]", 217 | "isHnsEnabled": "[parameters('storageIsHnsEnabled')]" 218 | }, 219 | "resources": [ 220 | { 221 | "type": "blobServices/containers", 222 | "apiVersion": "2018-02-01", 223 | "name": "[concat('default/', parameters('defaultDataLakeStorageFilesystemName'))]", 224 | "dependsOn": [ 225 | "[concat('Microsoft.Storage/storageAccounts/', parameters('defaultDataLakeStorageAccountName'))]" 226 | ], 227 | "properties": { 228 | "publicAccess": "None" 229 | }, 230 | "condition": "[parameters('isNewStorageAccount')]" 231 | } 232 | ], 233 | "condition": "[parameters('isNewStorageAccount')]" 234 | }, 235 | { 236 | "type": "Microsoft.Resources/deployments", 237 | "apiVersion": "2019-05-01", 238 | "name": "[parameters('defaultDataLakeStorageFilesystemName')]", 239 | "properties": { 240 | "mode": "Incremental", 241 | "template": { 242 | "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", 243 | "contentVersion": "1.0.0.0", 244 | "parameters": {}, 245 | "variables": {}, 246 | "resources": [ 247 | { 248 | "type": "Microsoft.Storage/storageAccounts/blobServices/containers", 249 | "name": "[concat(parameters('defaultDataLakeStorageAccountName'), '/default/', parameters('defaultDataLakeStorageFilesystemName'))]", 250 | "apiVersion": "2018-02-01", 
251 | "properties": { 252 | "publicAccess": "None" 253 | } 254 | } 255 | ] 256 | } 257 | }, 258 | "subscriptionId": "[parameters('storageSubscriptionID')]", 259 | "resourceGroup": "[parameters('storageResourceGroupName')]", 260 | "condition": "[parameters('isNewFileSystemOnly')]" 261 | } 262 | ], 263 | "outputs": {} 264 | } -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/DSWithParams.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/DSWithParams.png -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Drawing1.vsdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/Drawing1.vsdx -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Drop All Target DB FKs.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | 'ALTER TABLE SalesLT.' 
+ SUBSTRING(REPLACE(name,'FK_',''),0,CHARINDEX('_',REPLACE(name,'FK_',''))) + ' ' + 3 | 'DROP CONSTRAINT ' + name 4 | FROM 5 | sys.objects 6 | WHERE 7 | type = 'f' 8 | 9 | -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/GenericSQLDB.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "GenericSQLDB", 3 | "type": "Microsoft.DataFactory/factories/linkedservices", 4 | "properties": { 5 | "parameters": { 6 | "ServerInstance": { 7 | "type": "String" 8 | }, 9 | "DatabaseName": { 10 | "type": "String" 11 | }, 12 | "SQLUser": { 13 | "type": "String" 14 | }, 15 | "SQLPassword": { 16 | "type": "String" 17 | } 18 | }, 19 | "type": "AzureSqlDatabase", 20 | "typeProperties": { 21 | "connectionString": "Integrated Security=False;Encrypt=True;Connection Timeout=30;Data Source=@{linkedService().ServerInstance};Initial Catalog=@{linkedService().DatabaseName};User ID=@{linkedService().SQLUser};Password=@{linkedService().SQLPassword}" 22 | } 23 | } 24 | } -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/GenericSQLTable.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "GenericSQLTable", 3 | "properties": { 4 | "linkedServiceName": { 5 | "referenceName": "GenericSQLDB", 6 | "type": "LinkedServiceReference", 7 | "parameters": { 8 | "ServerInstance": { 9 | "value": "@dataset().DSServerInstance", 10 | "type": "Expression" 11 | }, 12 | "DatabaseName": { 13 | "value": "@dataset().DSDatabaseName", 14 | "type": "Expression" 15 | }, 16 | "SQLUser": { 17 | "value": "@dataset().DSSQLUser", 18 | "type": "Expression" 19 | }, 20 | "SQLPassword": { 21 | "value": "@dataset().DSSQLPassword", 22 | "type": "Expression" 23 | } 24 | } 25 | }, 26 | "parameters": { 27 | "DSServerInstance": { 28 | "type": "String" 29 | }, 30 | 
"DSDatabaseName": { 31 | "type": "String" 32 | }, 33 | "DSSQLUser": { 34 | "type": "String" 35 | }, 36 | "DSSQLPassword": { 37 | "type": "String" 38 | }, 39 | "DSTableName": { 40 | "type": "String" 41 | } 42 | }, 43 | "folder": { 44 | "name": "Demo Datasets" 45 | }, 46 | "type": "AzureSqlTable", 47 | "typeProperties": { 48 | "tableName": { 49 | "value": "@dataset().DSTableName", 50 | "type": "Expression" 51 | } 52 | } 53 | } 54 | } -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Get Tables Query.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | s.name + '.' + o.name AS TableName 3 | FROM 4 | sys.objects o 5 | INNER JOIN sys.schemas s 6 | ON o.schema_id = s.schema_id 7 | WHERE 8 | s.name = 'SalesLT' 9 | AND o.type = 'U' -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Linked Service Param Flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/Linked Service Param Flow.png -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/LinkedServiceParams.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/LinkedServiceParams.png -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Metadata Driven Pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": 
"DynamicLinkedServiceBootstrap", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "GetTableList", 7 | "type": "Lookup", 8 | "policy": { 9 | "timeout": "7.00:00:00", 10 | "retry": 0, 11 | "retryIntervalInSeconds": 30, 12 | "secureOutput": false, 13 | "secureInput": false 14 | }, 15 | "typeProperties": { 16 | "source": { 17 | "type": "SqlSource", 18 | "sqlReaderQuery": "SELECT \n\ts.name + '.' + o.name AS TableName\nFROM \n\tsys.objects o\n\tINNER JOIN sys.schemas s\n\t\tON o.schema_id = s.schema_id\nWHERE\n\ts.name = 'SalesLT'\n\tAND o.type = 'U'" 19 | }, 20 | "dataset": { 21 | "referenceName": "GenericSQLTable", 22 | "type": "DatasetReference", 23 | "parameters": { 24 | "DSServerInstance": { 25 | "value": "@pipeline().parameters.PSourceServerInstance", 26 | "type": "Expression" 27 | }, 28 | "DSDatabaseName": { 29 | "value": "@pipeline().parameters.PSourceDatabaseName", 30 | "type": "Expression" 31 | }, 32 | "DSSQLUser": { 33 | "value": "@pipeline().parameters.PSourceSQLUser", 34 | "type": "Expression" 35 | }, 36 | "DSSQLPassword": { 37 | "value": "@pipeline().parameters.PSourceSQLPassword", 38 | "type": "Expression" 39 | }, 40 | "DSTableName": "objects" 41 | } 42 | }, 43 | "firstRowOnly": false 44 | } 45 | }, 46 | { 47 | "name": "Copy All Tables", 48 | "type": "ForEach", 49 | "dependsOn": [ 50 | { 51 | "activity": "GetTableList", 52 | "dependencyConditions": [ 53 | "Succeeded" 54 | ] 55 | } 56 | ], 57 | "typeProperties": { 58 | "items": { 59 | "value": "@activity('GetTableList').output.value", 60 | "type": "Expression" 61 | }, 62 | "activities": [ 63 | { 64 | "name": "Truncate Target Table", 65 | "type": "SqlServerStoredProcedure", 66 | "policy": { 67 | "timeout": "7.00:00:00", 68 | "retry": 0, 69 | "retryIntervalInSeconds": 30, 70 | "secureOutput": false, 71 | "secureInput": false 72 | }, 73 | "typeProperties": { 74 | "storedProcedureName": "sys.sp_executesql", 75 | "storedProcedureParameters": { 76 | "statement": { 77 | "value": { 78 | "value": 
"@concat('TRUNCATE TABLE ',item().TableName)", 79 | "type": "Expression" 80 | }, 81 | "type": "string" 82 | } 83 | } 84 | }, 85 | "linkedServiceName": { 86 | "referenceName": "GenericSQLDB", 87 | "type": "LinkedServiceReference", 88 | "parameters": { 89 | "ServerInstance": { 90 | "value": "@pipeline().parameters.PSinkServerInstance", 91 | "type": "Expression" 92 | }, 93 | "DatabaseName": { 94 | "value": "@pipeline().parameters.PSinkDatabaseName", 95 | "type": "Expression" 96 | }, 97 | "SQLUser": { 98 | "value": "@pipeline().parameters.PSinkSQLUser", 99 | "type": "Expression" 100 | }, 101 | "SQLPassword": { 102 | "value": "@pipeline().parameters.PSinkSQLPassword", 103 | "type": "Expression" 104 | } 105 | } 106 | } 107 | }, 108 | { 109 | "name": "Copy Table", 110 | "type": "Copy", 111 | "dependsOn": [ 112 | { 113 | "activity": "Truncate Target Table", 114 | "dependencyConditions": [ 115 | "Succeeded" 116 | ] 117 | } 118 | ], 119 | "policy": { 120 | "timeout": "7.00:00:00", 121 | "retry": 0, 122 | "retryIntervalInSeconds": 30, 123 | "secureOutput": false, 124 | "secureInput": false 125 | }, 126 | "typeProperties": { 127 | "source": { 128 | "type": "SqlSource" 129 | }, 130 | "sink": { 131 | "type": "SqlSink", 132 | "writeBatchSize": 10000 133 | }, 134 | "enableStaging": false, 135 | "dataIntegrationUnits": 0 136 | }, 137 | "inputs": [ 138 | { 139 | "referenceName": "GenericSQLTable", 140 | "type": "DatasetReference", 141 | "parameters": { 142 | "DSServerInstance": { 143 | "value": "@pipeline().parameters.PSourceServerInstance", 144 | "type": "Expression" 145 | }, 146 | "DSDatabaseName": { 147 | "value": "@pipeline().parameters.PSourceDatabaseName", 148 | "type": "Expression" 149 | }, 150 | "DSSQLUser": { 151 | "value": "@pipeline().parameters.PSourceSQLUser", 152 | "type": "Expression" 153 | }, 154 | "DSSQLPassword": { 155 | "value": "@pipeline().parameters.PSourceSQLPassword", 156 | "type": "Expression" 157 | }, 158 | "DSTableName": { 159 | "value": 
"@{item().TableName}", 160 | "type": "Expression" 161 | } 162 | } 163 | } 164 | ], 165 | "outputs": [ 166 | { 167 | "referenceName": "GenericSQLTable", 168 | "type": "DatasetReference", 169 | "parameters": { 170 | "DSServerInstance": { 171 | "value": "@pipeline().parameters.PSinkServerInstance", 172 | "type": "Expression" 173 | }, 174 | "DSDatabaseName": { 175 | "value": "@pipeline().parameters.PSinkDatabaseName", 176 | "type": "Expression" 177 | }, 178 | "DSSQLUser": { 179 | "value": "@pipeline().parameters.PSinkSQLUser", 180 | "type": "Expression" 181 | }, 182 | "DSSQLPassword": { 183 | "value": "@pipeline().parameters.PSinkSQLPassword", 184 | "type": "Expression" 185 | }, 186 | "DSTableName": { 187 | "value": "@{item().TableName}", 188 | "type": "Expression" 189 | } 190 | } 191 | } 192 | ] 193 | } 194 | ] 195 | } 196 | } 197 | ], 198 | "parameters": { 199 | "PSourceServerInstance": { 200 | "type": "String", 201 | "defaultValue": "YOUR SOURCE INSTANCE.database.windows.net" 202 | }, 203 | "PSourceDatabaseName": { 204 | "type": "String", 205 | "defaultValue": "AdventureWorksOLTP" 206 | }, 207 | "PSourceSQLUser": { 208 | "type": "String", 209 | "defaultValue": "SOURCE USERNAME" 210 | }, 211 | "PSourceSQLPassword": { 212 | "type": "String", 213 | "defaultValue": "SOURCE PASSWORD" 214 | }, 215 | "PSinkServerInstance": { 216 | "type": "String", 217 | "defaultValue": "YOUR TARGET INSTANCE.database.windows.net" 218 | }, 219 | "PSinkDatabaseName": { 220 | "type": "String", 221 | "defaultValue": "AdventureWorksOLTP" 222 | }, 223 | "PSinkSQLUser": { 224 | "type": "String", 225 | "defaultValue": "TARGET USERNAME" 226 | }, 227 | "PSinkSQLPassword": { 228 | "type": "String", 229 | "defaultValue": "TARGET PASSWORD" 230 | } 231 | } 232 | } 233 | } -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Metadata Driven Pipeline.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/Metadata Driven Pipeline.png -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Pipeline Level Params.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/Pipeline Level Params.png -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Pipeline at Runtime.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/Pipeline at Runtime.png -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Scenario Image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/Scenario Image.png -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Simple Pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "DynamicLinkedServices", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Copy Something", 7 | "type": "Copy", 8 | "policy": { 9 | "timeout": "7.00:00:00", 10 | "retry": 0, 11 | "retryIntervalInSeconds": 30, 12 | 
"secureOutput": false, 13 | "secureInput": false 14 | }, 15 | "typeProperties": { 16 | "source": { 17 | "type": "SqlSource" 18 | }, 19 | "sink": { 20 | "type": "SqlSink", 21 | "writeBatchSize": 10000 22 | }, 23 | "enableStaging": false, 24 | "dataIntegrationUnits": 0 25 | }, 26 | "inputs": [ 27 | { 28 | "referenceName": "GenericSQLTable", 29 | "type": "DatasetReference", 30 | "parameters": { 31 | "DSServerInstance": { 32 | "value": "@pipeline().parameters.SourceServerInstance", 33 | "type": "Expression" 34 | }, 35 | "DSDatabaseName": { 36 | "value": "@pipeline().parameters.SourceDatabaseName", 37 | "type": "Expression" 38 | }, 39 | "DSSQLUser": { 40 | "value": "@pipeline().parameters.SourceSQLUser", 41 | "type": "Expression" 42 | }, 43 | "DSSQLPassword": { 44 | "value": "@pipeline().parameters.SourceSQLPassword", 45 | "type": "Expression" 46 | }, 47 | "DSTableName": { 48 | "value": "@pipeline().parameters.SourceTableName", 49 | "type": "Expression" 50 | } 51 | } 52 | } 53 | ], 54 | "outputs": [ 55 | { 56 | "referenceName": "GenericSQLTable", 57 | "type": "DatasetReference", 58 | "parameters": { 59 | "DSServerInstance": { 60 | "value": "@pipeline().parameters.SinkServerInstance", 61 | "type": "Expression" 62 | }, 63 | "DSDatabaseName": { 64 | "value": "@pipeline().parameters.SinkDatabaseName", 65 | "type": "Expression" 66 | }, 67 | "DSSQLUser": { 68 | "value": "@pipeline().parameters.SinkSQLUser", 69 | "type": "Expression" 70 | }, 71 | "DSSQLPassword": { 72 | "value": "@pipeline().parameters.SinkSQLPassword", 73 | "type": "Expression" 74 | }, 75 | "DSTableName": { 76 | "value": "@pipeline().parameters.SinkTableName", 77 | "type": "Expression" 78 | } 79 | } 80 | } 81 | ] 82 | } 83 | ], 84 | "parameters": { 85 | "SourceServerInstance": { 86 | "type": "String" 87 | }, 88 | "SourceDatabaseName": { 89 | "type": "String" 90 | }, 91 | "SourceSQLUser": { 92 | "type": "String" 93 | }, 94 | "SourceSQLPassword": { 95 | "type": "String" 96 | }, 97 | "SourceTableName": { 98 | 
"type": "String" 99 | }, 100 | "SinkServerInstance": { 101 | "type": "String" 102 | }, 103 | "SinkDatabaseName": { 104 | "type": "String" 105 | }, 106 | "SinkSQLUser": { 107 | "type": "String" 108 | }, 109 | "SinkSQLPassword": { 110 | "type": "String" 111 | }, 112 | "SinkTableName": { 113 | "type": "String" 114 | } 115 | }, 116 | "folder": { 117 | "name": "Demo Pipelines" 118 | } 119 | } 120 | } -------------------------------------------------------------------------------- /Using Data Factory Parameterised Linked Services/Stored Proc Settings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrpaulandrew/BlogSupportingContent/aa90203a4dcf02b17f06bab75239094642147e33/Using Data Factory Parameterised Linked Services/Stored Proc Settings.png --------------------------------------------------------------------------------