├── DOTNET
├── EnglishWords.cs
├── LogUserInvocation.cs
├── ValidateJSONSchema.cs
└── index.json
├── LICENSE
├── PYTHON
├── DataManipulation
│ ├── manipulate_data_with_pandas.py
│ ├── mask_credit_card.py
│ ├── sentiment_analysis.py
│ ├── transform_data_with_numpy.py
│ └── vectorize_strings.py
├── Lakehouse
│ ├── query_data_from_tables.py
│ ├── read_csv_file_from_lakehouse.py
│ ├── read_parquet_from_lakehouse.py
│ └── write_csv_file_in_lakehouse.py
├── SQLDB
│ ├── read_from_sql_db.py
│ ├── write_many_rows_to_sql_db.py
│ └── write_one_row_to_sql_db.py
├── UDFDataTypes
│ ├── raise_userthrownerror.py
│ └── use_userdatafunctioncontext.py
├── Warehouse
│ ├── export_warehouse_data_to_lakehouse.py
│ └── query_data_from_warehouse.py
└── index.json
├── README.md
├── SECURITY.md
├── Templates
├── Dotnet
│ └── UDF
│ │ └── HelloFabric
│ │ ├── Deploy.zip
│ │ ├── FabricFunctions.cs
│ │ ├── SourceCode.zip
│ │ └── functions.metadata
├── Python
│ └── UDF
│ │ └── HelloFabric
│ │ ├── Deploy.zip
│ │ ├── SourceCode.zip
│ │ ├── function_app.py
│ │ └── functions.metadata
└── Version.xml
├── Version.xml
└── image.png
/DOTNET/EnglishWords.cs:
--------------------------------------------------------------------------------
1 |
2 | /* Description
3 | Use an English language dictionary to get random words https://www.nuget.org/packages/DictionaryLib_Calvin_Hsia
4 | Contains 2 English Language Dictionaries: small (53,000 words) and large (172,000 words)
5 | Allows you to see if a string is a valid English word. Also can generate random words. Works on Windows, Android
6 | Sample Android Phone word game https://github.com/calvinhsia/WordScape
7 | */
8 |
9 | /* How to use:
10 | Usings: No new 'usings' required
    11 | PackageReference: Need to modify the HelloFabric.csproj to add this line:
    12 |     <PackageReference Include="DictionaryLib_Calvin_Hsia" Version="*" />  (NOTE: the original comment omitted the line; confirm the exact package id/version on nuget.org)
    13 | */
14 |
15 |
// Large English dictionary (~172,000 words) used to generate random words.
private readonly DictionaryLib.DictionaryLib dict = new(DictionaryLib.DictionaryType.Large);

/// <summary>
/// Returns a space-separated string of random English words.
/// </summary>
/// <param name="length">Number of words to generate; defaults to 5 when null.</param>
/// <returns>The generated words joined by single spaces.</returns>
[Function(nameof(RandWords))]
public string RandWords(int? length)
{
    // Default to 5 words when the caller does not supply a count.
    int count = length ?? 5;

    // Draw one random word per slot. (The previous version also fetched an
    // extra unused random word; that dead local has been removed.)
    var words = Enumerable.Range(0, count).Select(_ => dict.RandomWord());
    return string.Join(" ", words);
}
28 |
--------------------------------------------------------------------------------
/DOTNET/LogUserInvocation.cs:
--------------------------------------------------------------------------------
1 | /*
2 |
3 | */
/// <summary>
/// Records the current invocation in the InvocationLog warehouse and returns a
/// greeting naming the executing user.
/// </summary>
/// <param name="context">Invocation metadata (id, executing user) supplied by the runtime.</param>
/// <param name="invocationWarehouse">Connection to the "InvocationLog" Fabric item.</param>
/// <param name="invokedFrom">Caller-supplied label describing where the call originated.</param>
[Function(nameof(LogUserInvocation))]
public string LogUserInvocation([UserDataFunctionContext] UserDataFunctionContext context,
    [FabricItemInput("InvocationLog")] SqlConnection invocationWarehouse, string invokedFrom)
{
    _logger.LogInformation("C# Fabric data function is called.");

    // Persist who invoked the function, and from where, before replying.
    WriteInvocationToLogDatabase(invokedFrom, context, invocationWarehouse);

    return $"User {context.ExecutingUser.PreferredUsername} ran function/pipeline at {DateTime.Now}!";
}
13 |
/// <summary>
/// Inserts one row into InvocationLog.Invocations.Log describing this invocation.
/// </summary>
/// <param name="invokedFrom">Caller-supplied label describing where the call originated.</param>
/// <param name="context">Invocation metadata (id, executing user).</param>
/// <param name="logDatabase">Open-able connection to the log warehouse; opened and closed here.</param>
private void WriteInvocationToLogDatabase(string invokedFrom, UserDataFunctionContext context, SqlConnection logDatabase)
{
    logDatabase.Open();
    // Use SQL parameters instead of string interpolation so caller-supplied
    // values (e.g. invokedFrom, user names containing quotes) cannot break
    // the statement or inject SQL.
    const string query = "INSERT INTO InvocationLog.Invocations.Log VALUES (@invocationId, @userName, @oid, @invokedFrom)";
    using SqlCommand command = new SqlCommand(query, logDatabase);
    command.Parameters.AddWithValue("@invocationId", context.InvocationId);
    command.Parameters.AddWithValue("@userName", context.ExecutingUser.PreferredUsername);
    command.Parameters.AddWithValue("@oid", context.ExecutingUser.Oid);
    command.Parameters.AddWithValue("@invokedFrom", invokedFrom);
    command.ExecuteNonQuery();
    logDatabase.Close();
}
22 |
--------------------------------------------------------------------------------
/DOTNET/ValidateJSONSchema.cs:
--------------------------------------------------------------------------------
1 |
2 | /* Description
3 | This sample validates a JSON string schema
4 | */
5 |
/// <summary>
/// Checks whether the supplied JSON string conforms to the expected schema.
/// </summary>
/// <param name="message">The JSON object, passed as a string.</param>
/// <returns>true when the message parses and matches the schema; false otherwise (including null input).</returns>
[Function("ValidateJSONSchema")]
public bool ValidateJSONSchema(string message) // Pass the JSON object as a string for message variable
{
    _logger.LogInformation("Validating JSON Schema for message: {message}", message);

    // Expected shape: an object with a string 'name' and an array 'roles'.
    JSchema schema = JSchema.Parse(@"{
        'type': 'object',
        'properties': {
          'name': {'type':'string'},
          'roles': {'type': 'array'}
        }
    }");

    if (message == null)
    {
        return false;
    }

    // Parse the payload and validate it against the schema.
    JObject input = JObject.Parse(message);
    return input.IsValid(schema);
}
28 |
--------------------------------------------------------------------------------
/DOTNET/index.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name": "English Words Sample",
4 | "description": "Add English Word dictionary",
5 | "detail": "Query an English Word dictionary for random words or spell checking",
6 | "dateAdded": "2024-08-31T17:50:52.184Z",
7 | "data": "EnglishWords.cs"
8 | },
9 | {
10 | "name": "ValidateJsonSchema",
11 | "description": "Validate a JSON Schema",
12 | "detail": "validate a JSON Schema",
13 | "dateAdded": "2024-08-31T17:50:52.184Z",
14 | "data": "ValidateJSONSchema.cs"
15 | },
16 | {
17 | "name": "Log User Invocation",
    18 |     "description": "Log the invoking user to a SQL database",
19 | "detail": "",
20 | "dateAdded": "2024-08-31T17:50:52.184Z",
21 | "data": "LogUserInvocation.cs"
22 | }
23 | ]
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Microsoft
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/PYTHON/DataManipulation/manipulate_data_with_pandas.py:
--------------------------------------------------------------------------------
1 | # This sample uses pandas to manipulate data for a given age group
2 | # Complete these steps before testing this function
3 | # 1. Select library management and add pandas library
4 | # Pass input as a list of objects, an example to use for this sample
5 | # [
6 | # {
7 | # "Name": "John",
8 | # "Age": 22,
9 | # "Gender": "male"
10 | # }
11 | # ]
12 |
13 | import pandas as pd
14 |
@udf.function()
def manipulate_data(data: list) -> list:
    """Group people into age bands with pandas and return the mean age per band.

    Args:
        data: list of objects, each with at least 'Name', 'Age', 'Gender' keys.

    Returns:
        A list of records (dicts), one per 'AgeGroup', with the mean 'Age'.
    """
    # Convert the input records to a DataFrame
    df = pd.DataFrame(data)

    # Derive an 'AgeGroup' column from 'Age' (18 and over -> Adult, else Minor)
    df['AgeGroup'] = df['Age'].apply(lambda x: 'Adult' if x >= 18 else 'Minor')

    # Group by 'AgeGroup' and calculate the mean age
    df_grouped = df.groupby("AgeGroup")["Age"].mean().reset_index()

    # Return actual records so the value matches the declared '-> list'
    # annotation; the previous to_json() call returned a JSON *string*.
    return df_grouped.to_dict(orient='records')
31 |
--------------------------------------------------------------------------------
/PYTHON/DataManipulation/mask_credit_card.py:
--------------------------------------------------------------------------------
1 | # This sample allows you to pass a credit card as integer and mask the card leaving the last 4 digits.
2 |
3 |
@udf.function()
def mask_credit_card(card_number: int) -> str:
    """Mask a credit card number, leaving only the last 4 digits visible.

    Note: because the input is an int, any leading zeros are lost before the
    function is called; numbers with leading zeros cannot be represented here.

    Raises:
        ValueError: if the number is negative or not 13-19 digits long.
    """
    # Convert the card number to a string
    card_number_str = str(card_number)

    # Reject negatives (str() of a negative int contains '-') and lengths
    # outside the usual 13-19 digit range for card numbers.
    if not card_number_str.isdigit() or not (13 <= len(card_number_str) <= 19):
        raise ValueError("Invalid credit card number")

    # Replace all but the last four digits with '*'. The result is already a
    # str, so no extra conversion is needed on return.
    return '*' * (len(card_number_str) - 4) + card_number_str[-4:]
17 |
--------------------------------------------------------------------------------
/PYTHON/DataManipulation/sentiment_analysis.py:
--------------------------------------------------------------------------------
1 | # This sample allows you to calculate the sentiment of given text that is passed to the function as input.
    2 | # Complete these steps before testing this function
3 | # 1. Select library management and add textblob library
4 |
5 | from textblob import TextBlob
6 |
@udf.function()
def analyze_sentiment(text: str) -> str:
    """Classify the sentiment of the given text as Happy, Sad, or Neutral."""
    # TextBlob polarity is in [-1.0, 1.0]; values near zero count as neutral.
    polarity = TextBlob(text).sentiment.polarity

    if polarity > 0.1:
        label = "Happy"
    elif polarity < -0.1:
        label = "Sad"
    else:
        label = "Neutral"

    return f"Sentiment for {text} is {label}"
20 |
--------------------------------------------------------------------------------
/PYTHON/DataManipulation/transform_data_with_numpy.py:
--------------------------------------------------------------------------------
1 | # This samples converts the input 1D list to a numpy array. The output is normalized to the range [0, 1] and we calculate the mean.
2 | # Complete these steps before testing this function
3 | # 1. Select 'Library management' and add numpy library
4 | # 2. Pass input as a list, an example to use for this sample:
5 | # [1, 2, 3, 4, 5]
6 |
7 | import numpy as np
8 | import json
9 |
@udf.function()
def transform_data(data: list) -> dict:
    """Normalize a 1D numeric list to the range [0, 1] and compute its mean.

    Args:
        data: 1D list of numbers, e.g. [1, 2, 3, 4, 5].

    Returns:
        dict with 'NormalizedData' (list of floats) and 'Mean' (float).
    """
    # Convert the 1D list to a numpy array
    np_data = np.array(data)

    # Normalize the data (scale values to range [0, 1]). Guard against a
    # zero range (all elements equal), which previously divided by zero.
    min_val = np.min(np_data, axis=0)
    max_val = np.max(np_data, axis=0)
    value_range = max_val - min_val
    if value_range == 0:
        normalized_data = np.zeros_like(np_data, dtype=float)
    else:
        normalized_data = (np_data - min_val) / value_range

    # Mean of the whole (1D) input.
    mean = np.mean(np_data, axis=0)

    return {"NormalizedData": normalized_data.tolist(), "Mean": float(mean)}
25 |
--------------------------------------------------------------------------------
/PYTHON/DataManipulation/vectorize_strings.py:
--------------------------------------------------------------------------------
1 | # This sample vectorizes a string of text and returns a vectorized representation of the string.
2 | # Complete the following before testing this function
3 | # 1. Select 'Library management' and add "scikit-learn" library
4 |
5 |
6 | from sklearn.feature_extraction.text import CountVectorizer
7 |
@udf.function()
def vectorize_string(text: str) -> str:
    """Vectorize a text string and return its counts plus the feature names."""
    try:
        vectorizer = CountVectorizer()

        # Learn the vocabulary and build the count matrix for this one text.
        vectorized_text = vectorizer.fit_transform([text])

        # Render the count rows and the learned vocabulary as strings.
        vectors = ''.join(str(x) for x in vectorized_text.toarray())
        featurenames = " ,".join(str(x) for x in vectorizer.get_feature_names_out())

        print("Vectorized text:\n", vectorized_text.toarray())
        print("Feature names:\n", vectorizer.get_feature_names_out())

        return "vectorized_text: " + vectors + "\nfeature_names: " + featurenames
    except Exception as e:
        # Best-effort sample: report the failure instead of raising.
        return "An error occurred during vectorization: " + str(e)
23 |
--------------------------------------------------------------------------------
/PYTHON/Lakehouse/query_data_from_tables.py:
--------------------------------------------------------------------------------
1 |
2 | # This sample reads data from a table in a lakehouse
3 | # Complete these steps before testing this function
4 | # 1. Select 'Manage connections' and add a connection to a Lakehouse
5 |
6 | import datetime
7 |
8 | # Replace the alias "" with your connection alias.
@udf.connection(argName="myLakehouse", alias="")
@udf.function()
def query_data_from_tables(myLakehouse: fn.FabricLakehouseClient) -> list:
    """Query the Lakehouse SQL endpoint and return rows as a list of dicts."""
    # Connect to the Lakehouse SQL Endpoint
    connection = myLakehouse.connectToSql()

    # Run the sample query through a cursor.
    cursor = connection.cursor()
    cursor.execute("SELECT * FROM (VALUES ('John Smith', 31) , ('Kayla Jones', 33)) AS Employee(EmpName, DepID);")

    rows = list(cursor)
    column_names = [col[0] for col in cursor.description]

    # Build one {column: value} dict per row; dates are converted to
    # ISO-8601 strings so the result is JSON-serializable.
    values = []
    for row in rows:
        item = {}
        for prop, val in zip(column_names, row):
            if isinstance(val, (datetime.date, datetime.datetime)):
                val = val.isoformat()
            item[prop] = val
        values.append(item)

    # Close the connection
    cursor.close()
    connection.close()

    return values
37 |
38 |
--------------------------------------------------------------------------------
/PYTHON/Lakehouse/read_csv_file_from_lakehouse.py:
--------------------------------------------------------------------------------
1 |
2 | # This sample reads a CSV file from a lakehouse using pandas. Function takes file name as an input parameter
    3 | # Complete these steps before testing this function
4 | # 1. Select 'Manage connections' and add a connection to a Lakehouse
5 | # 2. Select 'Library management' and add pandas library
6 |
7 | import pandas as pd
8 |
9 | # Replace the alias "" with your connection alias.
@udf.connection(argName="myLakehouse", alias="")
@udf.function()
def read_csv_from_lakehouse(myLakehouse: fn.FabricLakehouseClient, csvFileName: str) -> str:
    """Download a CSV file from the Lakehouse and return its rows as text.

    Args:
        myLakehouse: Lakehouse connection supplied by the runtime.
        csvFileName: File path relative to the Lakehouse "Files/" area.
    """
    # Connect to the Lakehouse
    connection = myLakehouse.connectToFiles()

    # Download the CSV file from the Lakehouse
    csvFile = connection.get_file_client(csvFileName)
    downloadFile = csvFile.download_file()
    csvData = downloadFile.readall()

    # Read the CSV data into a pandas DataFrame
    from io import StringIO
    df = pd.read_csv(StringIO(csvData.decode('utf-8')))

    # Render each row as "[v1,v2,...]"; collect the pieces and join once at
    # the end instead of concatenating strings inside the loop (O(n) rather
    # than O(n^2)), matching the sibling parquet sample.
    rendered = []
    for index, row in df.iterrows():
        rendered.append("[" + (",".join([str(item) for item in row])) + "]")
    result = "".join(rendered)

    # Close the connection
    csvFile.close()
    connection.close()

    return f"CSV file read successfully.{result}"
36 |
--------------------------------------------------------------------------------
/PYTHON/Lakehouse/read_parquet_from_lakehouse.py:
--------------------------------------------------------------------------------
1 | # This sample reads a parquet file from a lakehouse
2 | # Complete these steps before testing this function
3 | # 1. Select 'Manage connections' and add a connection to a Lakehouse which has a parquet file
4 | # 2. Select 'Library management' and add the pyarrow, pandas libraries
5 | # 3. Replace the alias "" with your connection alias.
6 |
7 | import pandas as pd
8 | from io import BytesIO
9 | import pyarrow.parquet as pq # This is engine needed to read parquet files
10 |
11 | # Replace the alias "" with your connection alias.
@udf.connection(argName="myLakehouse", alias="")
@udf.function()
def read_parquet_from_lakehouse(myLakehouse: fn.FabricLakehouseClient, parquetFileName: str) -> str:
    """Download a parquet file from the Lakehouse and return its rows as text.

    parquetFileName is relative to "Files/": for "Files/myfile.parquet" pass
    "myfile.parquet"; for "Files/Folder1/myfile.parquet" pass
    "Folder1/myfile.parquet".
    """
    # Open the Lakehouse file endpoint and fetch the raw parquet bytes.
    connection = myLakehouse.connectToFiles()
    parquet_client = connection.get_file_client(parquetFileName)
    payload = parquet_client.download_file().readall()

    # Parse the bytes into a DataFrame (pyarrow serves as the engine).
    df = pd.read_parquet(BytesIO(payload))

    # Render every row as "[v1,v2,...]" and concatenate the pieces.
    rendered = ["[" + ",".join(str(item) for item in row) + "]" for _, row in df.iterrows()]
    result = "".join(rendered)

    # Release the file handle and the connection.
    parquet_client.close()
    connection.close()

    return f"Parquet file read successfully.{result}"
40 |
--------------------------------------------------------------------------------
/PYTHON/Lakehouse/write_csv_file_in_lakehouse.py:
--------------------------------------------------------------------------------
1 |
    2 | # This sample writes a CSV file to a lakehouse using pandas
    3 | # Complete these steps before testing this function
4 | # 1. Select 'Manage connections' and add a connection to a Lakehouse
5 | # 2. Select 'Library management' and add pandas library
6 | # 3. Sample input for employees:
7 | # [[1,"John Smith", 31], [2,"Kayla Jones", 33]]
8 |
9 | import pandas as pd
10 | import datetime
11 |
12 | #Replace the alias "" with your connection alias.
@udf.connection(argName="myLakehouse", alias="")
@udf.function()
def write_csv_file_in_lakehouse(myLakehouse: fn.FabricLakehouseClient, employees: list) -> str:
    """Write the given employee rows to a timestamped CSV file in the Lakehouse."""
    # Timestamp in the name keeps repeated runs from overwriting each other.
    csvFileName = "Employees" + str(round(datetime.datetime.now().timestamp())) + ".csv"

    # Build the CSV payload with pandas.
    df = pd.DataFrame(employees, columns=['ID', 'EmpName', 'DepID'])
    csv_string = df.to_csv(index=False)

    # Upload it to the Lakehouse Files area.
    connection = myLakehouse.connectToFiles()
    csvFile = connection.get_file_client(csvFileName)
    csvFile.upload_data(csv_string, overwrite=True)

    csvFile.close()
    connection.close()
    return f"File {csvFileName} was written to the Lakehouse. Open the Lakehouse in https://app.fabric.microsoft.com to view the files"
32 |
--------------------------------------------------------------------------------
/PYTHON/SQLDB/read_from_sql_db.py:
--------------------------------------------------------------------------------
1 | # This sample allows you to read data from a Fabric SQL Database
2 | # Complete these steps before testing this function
3 | # 1. Select 'Manage connections' and add a connection to a Fabric SQL Database
4 | # 2. Copy the Alias name and replace it below inside the @udf.connection() decorator.
5 |
6 |
@udf.connection(argName="sqlDB",alias="")
@udf.function()
def read_from_sql_db(sqlDB: fn.FabricSqlConnection) -> list:
    """Run a query against the Fabric SQL database and return all result rows."""
    # Replace with the query you want to run
    query = "SELECT * FROM (VALUES ('John Smith', 31), ('Kayla Jones', 33)) AS Employee(EmpName, DepID);"

    # Establish a connection to the SQL database
    connection = sqlDB.connect()
    cursor = connection.cursor()

    # Execute the query
    cursor.execute(query)

    # fetchall() already returns a list of every remaining row; re-appending
    # them one by one in a loop (as before) was redundant.
    results = cursor.fetchall()

    # Close the connection
    cursor.close()
    connection.close()

    return results
30 |
31 |
32 |
--------------------------------------------------------------------------------
/PYTHON/SQLDB/write_many_rows_to_sql_db.py:
--------------------------------------------------------------------------------
1 | # This sample allows you to write multiple rows of data into a Fabric SQL Database
2 | # Complete these steps before testing this function:
3 | # 1. Select 'Manage connections' and add a connection to a Fabric SQL Database
4 | # 2. Copy the Alias name and replace it inside the @udf.connection() decorator.
5 |
@udf.connection(argName="sqlDB",alias="")
@udf.function()
def write_many_to_sql_db(sqlDB: fn.FabricSqlConnection) -> str:
    """Create dbo.Employee if needed and bulk-insert the sample rows into it."""
    # Replace with the data you want to insert
    data = [(1,"John Smith", 31), (2,"Kayla Jones", 33),(3,"Edward Harris", 33)]

    # Open the database connection and get a cursor.
    connection = sqlDB.connect()
    cursor = connection.cursor()

    # Create the table on first use; the IF OBJECT_ID guard makes reruns safe.
    create_table_query = """
    IF OBJECT_ID(N'dbo.Employee', N'U') IS NULL
    CREATE TABLE dbo.Employee (
        EmpID INT PRIMARY KEY,
        EmpName nvarchar(50),
        DepID INT
    );
    """
    cursor.execute(create_table_query)

    # One parameterized INSERT, executed once per tuple in data.
    insert_query = "INSERT INTO Employee (EmpID, EmpName, DepID) VALUES (?, ?, ?);"
    cursor.executemany(insert_query, data)

    # Make the writes durable before tearing the connection down.
    connection.commit()

    cursor.close()
    connection.close()
    return "Employee table was created (if necessary) and data was added to this table"
38 |
--------------------------------------------------------------------------------
/PYTHON/SQLDB/write_one_row_to_sql_db.py:
--------------------------------------------------------------------------------
1 | # Write one row of data into a table in SQL database
2 | # This sample allows you to write one row of data into a Fabric SQL Database
3 | # Complete these steps before testing this function:
4 | # 1. Select 'Manage connections' and add a connection to a Fabric SQL Database
5 | # 2. Copy the Alias name and replace it inside the @udf.connection() decorator.
6 |
@udf.connection(argName="sqlDB",alias="")
@udf.function()
def write_one_to_sql_db(sqlDB: fn.FabricSqlConnection, employeeId: int, employeeName: str, deptId: int) -> str:
    """Create dbo.Employee if needed and insert one employee row into it."""
    # The row to insert, taken straight from the function parameters.
    data = (employeeId, employeeName, deptId)

    # Open the database connection and get a cursor.
    connection = sqlDB.connect()
    cursor = connection.cursor()

    # Create the table on first use; the IF OBJECT_ID guard makes reruns safe.
    create_table_query = """
    IF OBJECT_ID(N'dbo.Employee', N'U') IS NULL
    CREATE TABLE dbo.Employee (
        EmpID INT PRIMARY KEY,
        EmpName nvarchar(50),
        DepID INT
    );
    """
    cursor.execute(create_table_query)

    # Parameterized insert keeps the caller-supplied values safely escaped.
    insert_query = "INSERT INTO Employee (EmpID, EmpName, DepID) VALUES (?, ?, ?);"
    cursor.execute(insert_query, data)

    # Make the write durable before tearing the connection down.
    connection.commit()

    cursor.close()
    connection.close()
    return "Employee table was created (if necessary) and data was added to this table"
39 |
--------------------------------------------------------------------------------
/PYTHON/UDFDataTypes/raise_userthrownerror.py:
--------------------------------------------------------------------------------
1 | # This sample function raises a UserThrownError if the age given is less than 18.
2 | # The UserThrownError is a special type of error that can be used (or extended with a custom error class)
3 | # to make an invocation fail and allow context to be provided about why the function failed.
4 |
5 | import datetime
6 |
@udf.function()
def raise_userthrownerror(age: int) -> str:
    """Greet the caller, or fail the invocation when age is under 18."""
    # UserThrownError fails the invocation with a user-facing message plus
    # a context payload explaining why.
    if age < 18:
        raise fn.UserThrownError("You must be 18 years or older to use this service.", {"age": age})

    return f"Welcome to Fabric Functions at {datetime.datetime.now()}!"
13 |
--------------------------------------------------------------------------------
/PYTHON/UDFDataTypes/use_userdatafunctioncontext.py:
--------------------------------------------------------------------------------
1 | # This sample uses a parameter with data type UserDataFunctionContext,
2 | # which is a parameter that contains certain metadata about the function
3 | # invocation, such as the invocation id and some properties of the token
4 | # used to invoke the function.
5 |
6 | import datetime
7 |
@udf.context(argName="udfContext")
@udf.function()
def get_function_invocation_details(udfContext: fn.UserDataFunctionContext) -> str:
    """Return a greeting built from the invocation's context metadata."""
    invocation_id = udfContext.invocation_id
    # executing_user also exposes the 'Oid' and 'TenantId' keys.
    invoking_users_username = udfContext.executing_user['PreferredUsername']

    return f"Welcome to Fabric Functions, {invoking_users_username}, at {datetime.datetime.now()}! Invocation ID: {invocation_id}"
16 |
--------------------------------------------------------------------------------
/PYTHON/Warehouse/export_warehouse_data_to_lakehouse.py:
--------------------------------------------------------------------------------
1 | # This sample allows you to export data from warehouse and write it to a lakehouse
    2 | # Complete these steps before testing this function
3 | # 1. Select 'Manage connections' to connect to Warehouse and the Lakehouse you want to use.
4 | # 2. Copy the Alias name and replace below
5 |
6 | import datetime
7 |
@udf.connection(argName="myWarehouse", alias="")
@udf.connection(argName="myLakehouse", alias="")
@udf.function()
def export_warehouse_data_to_lakehouse(myWarehouse: fn.FabricSqlConnection, myLakehouse: fn.FabricLakehouseClient) -> dict:
    """Query the warehouse, save the rows as a CSV in the Lakehouse, and
    return a status message together with the rows as JSON-friendly dicts."""
    whSqlConnection = myWarehouse.connect()

    # Run the sample query through a cursor.
    cursor = whSqlConnection.cursor()
    cursor.execute("SELECT * FROM (VALUES ('John Smith', 31) , ('Kayla Jones', 33)) AS Employee(EmpName, DepID);")

    rows = list(cursor)
    columnNames = [col[0] for col in cursor.description]

    # Header line first, then one comma-separated line per row.
    csvRows = [','.join(columnNames)]
    for row in rows:
        csvRows.append(','.join(map(str, row)))

    # Upload the CSV under a timestamped name so reruns do not collide.
    lhFileConnection = myLakehouse.connectToFiles()
    csvFileName = "Employees" + str(round(datetime.datetime.now().timestamp())) + ".csv"
    csvFile = lhFileConnection.get_file_client(csvFileName)
    csvFile.upload_data('\n'.join(csvRows), overwrite=True)

    # Mirror the rows as dicts; dates become ISO-8601 strings so the
    # result is JSON-serializable.
    values = []
    for row in rows:
        item = {}
        for prop, val in zip(columnNames, row):
            if isinstance(val, (datetime.datetime, datetime.date)):
                val = val.isoformat()
            item[prop] = val
        values.append(item)

    # Release everything in acquisition order.
    cursor.close()
    whSqlConnection.close()
    csvFile.close()
    lhFileConnection.close()

    return {"message": "File {} is written to {} Lakehouse. You can delete it from the Lakehouse after trying this sample.".format(csvFileName, myLakehouse.alias_name),
            "values": values}
50 |
--------------------------------------------------------------------------------
/PYTHON/Warehouse/query_data_from_warehouse.py:
--------------------------------------------------------------------------------
1 | # Example of using a connection to query a Warehouse
    2 | # Complete these steps before testing this function
3 | # 1. Select 'Manage connections' to connect to a Warehouse
4 | # 2. Copy the Alias name and replace below
5 |
6 | import datetime
7 |
@udf.connection(argName="myWarehouse", alias="")
@udf.function()
def query_data_from_warehouse(myWarehouse: fn.FabricSqlConnection) -> list:
    """Run a sample query against the warehouse and return rows as dicts."""
    whSqlConnection = myWarehouse.connect()

    # Execute the sample query through a cursor.
    cursor = whSqlConnection.cursor()
    cursor.execute("SELECT * FROM (VALUES ('John Smith', 31) , ('Kayla Jones', 33)) AS Employee(EmpName, DepID);")

    rows = list(cursor)
    columnNames = [col[0] for col in cursor.description]

    # One {column: value} dict per row; dates become ISO-8601 strings so the
    # result is JSON-serializable.
    values = []
    for row in rows:
        item = {}
        for prop, val in zip(columnNames, row):
            if isinstance(val, (datetime.date, datetime.datetime)):
                val = val.isoformat()
            item[prop] = val
        values.append(item)

    cursor.close()
    whSqlConnection.close()

    return values
33 |
--------------------------------------------------------------------------------
/PYTHON/index.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name":"Warehouse",
4 | "description":"Sample functions to read from and write data to a warehouse in Fabric.",
5 | "data":[
6 | {
7 | "name":"Export data from warehouse into a lakehouse",
8 | "description":"Use this user data function that writes data from a warehouse into a csv file and saves it in a lakehouse",
9 | "detail":"Add a warehouse and lakehouse connection. Then use this sample function that writes data from a warehouse into csv file and saves it in a lakehouse",
10 | "dateAdded":"2024-11-05T00:00:00Z",
11 | "data":"Warehouse/export_warehouse_data_to_lakehouse.py"
12 | },
13 | {
14 | "name":"Query data from a warehouse",
15 | "description":"Use this user data function to query data from a warehouse.",
16 | "detail":"Add a warehouse connection to the user data function and then use the function to query data from a warehouse.",
17 | "dateAdded":"2024-11-05T00:00:00Z",
18 | "data":"Warehouse/query_data_from_warehouse.py"
19 | }
20 | ]
21 | },
22 | {
23 | "name":"Lakehouse",
24 | "description":"Sample functions to work with tables and files within a lakehouse in Fabric.",
25 | "data":[
26 | {
    27 |          "name":"Read a parquet file from a lakehouse",
    28 |          "description":"This sample reads a parquet file from a lakehouse.",
    29 |          "dateAdded":"2025-04-30T00:00:00Z",
30 | "data":"Lakehouse/read_parquet_from_lakehouse.py"
31 | },
32 | {
33 | "name":"Write csv file into a lakehouse",
34 | "description":"This sample writes a CSV file into a lakehouse using pandas.",
35 | "dateAdded":"2024-11-05T00:00:00Z",
36 | "data":"Lakehouse/write_csv_file_in_lakehouse.py"
37 | },
38 | {
39 | "name":"Read csv file from lakehouse",
40 | "description":"This sample reads a CSV file from a lakehouse using pandas. Function takes file name as an input parameter.",
41 | "dateAdded":"2024-11-05T00:00:00Z",
42 | "data":"Lakehouse/read_csv_file_from_lakehouse.py"
43 | },
44 | {
45 | "name":"Query data from lakehouse tables",
46 | "description":"This sample reads data from a table in a lakehouse.",
47 | "dateAdded":"2024-11-05T00:00:00Z",
48 | "data":"Lakehouse/query_data_from_tables.py"
49 | }
50 | ]
51 | },
52 | {
53 | "name":"SQL Database",
54 | "description":"Sample functions to work with SQL database.",
55 | "data":[
56 | {
57 | "name":"Write multiple rows of data into a table in SQL database",
58 | "description":"This sample allows you to write multiple rows of data into a SQL database.",
59 | "dateAdded":"2024-11-05T00:00:00Z",
60 | "data":"SQLDB/write_many_rows_to_sql_db.py"
61 | },
62 | {
63 | "name":"Write one row of data into a table in SQL database",
64 | "description":"This sample allows you to write one row of data into a SQL database.",
65 | "dateAdded":"2025-01-14T00:00:00Z",
66 | "data":"SQLDB/write_one_row_to_sql_db.py"
67 | },
68 | {
69 | "name":"Read data from a table in SQL database",
70 | "description":"This sample allows you to read data from SQL database.",
71 | "dateAdded":"2024-11-05T00:00:00Z",
72 | "data":"SQLDB/read_from_sql_db.py"
73 | }
74 | ]
75 | },
76 | {
77 | "name":"Data Manipulation",
78 | "description":"Sample functions to transform data using pandas,numpy.",
79 | "data":[
80 | {
81 | "name":"Manipulate data with pandas library",
82 | "description":"This sample uses pandas to manipulate a given dataset to group people by age.",
83 | "dateAdded":"2024-11-05T00:00:00Z",
84 | "data":"DataManipulation/manipulate_data_with_pandas.py"
85 | },
86 | {
87 | "name":"Transform data with numpy library",
88 | "description":"This samples converts the input 1D list to a numpy array. The output is normalized to the range [0, 1] and we calculate the mean.",
89 | "dateAdded":"2024-11-05T00:00:00Z",
90 | "data":"DataManipulation/transform_data_with_numpy.py"
91 | }
92 | ]
93 | },
94 | {
95 | "name":"UDF Data Types",
96 | "description":"Sample functions to show usage of data types supplied within the SDK library.",
97 | "data":[
98 | {
99 | "name":"Using UserDataFunctionContext",
100 | "description":"This sample uses UserDataFunctionContext to get metadata about the invocation.",
101 | "dateAdded":"2025-01-17T00:00:00Z",
102 | "data":"UDFDataTypes/use_userdatafunctioncontext.py"
103 | },
104 | {
105 | "name":"Raising UserThrownError",
106 |                 "description":"This sample raises an error if the age supplied as a parameter is < 18.",
107 | "dateAdded":"2025-01-17T00:00:00Z",
108 | "data":"UDFDataTypes/raise_userthrownerror.py"
109 | }
110 | ]
111 | }
112 | ]
113 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Fabric User data functions Samples
2 | This repository contains sample functions that can be used in Fabric. These functions can also be found in the Fabric portal.
3 | Samples are organized into the DOTNET and PYTHON folders based on the language you want to work with.
4 |
5 | ## Index.json
6 | Index.json defines all the functions that are listed when creating a function in VS Code or Fabric portal. Each tree level is a QuickPick input for the user:
7 |
8 | ![QuickPick levels defined by index.json](image.png)
9 |
10 | index.JSON Format:
11 |
12 | ```typescript
13 | export interface ISampleFunction {
14 | name: string; // shown in BOLD on 1st line of QuickPick Item
15 | description: string; // shown at end of 1st line of QuickPick Item
16 | detail?: string; // shown on 2nd line of QuickPick Item
17 | dateAdded?: string; // date added to the repo, so we can sort/filter, like '2024-08-31T17:50:52.184Z'
18 | tag?: string; // additional tag to filter on
19 | data: ISampleFunction[] | string; // if string, it's the full relative path file name from root to download. Else it's an array of ISampleFunction
20 | }
21 | ```
22 |
23 | Additional filtering, sorting by user may be added in the future with tag, dateAdded
24 |
25 | NOTE: These are not complete runnable samples. They are snippets that are inserted in User data function item in Fabric.
26 |
27 | ## Contributing
28 |
29 | You can contribute more function samples here. Follow this structure:
30 | - Start with a comment block describing the code, indicating any changes in usings/packagereferences or imports/requirements.txt. Keep in mind that this code can be inserted from the Fabric Portal or from VSCode.
31 | - Fabric User data function code snippet
32 |
33 | ### DOTNET Samples
34 | Indent the sample by 8 spaces: the text will be inserted in the FabricFunctions.cs before the last 2 closing braces
35 |
36 | ### PYTHON Samples
37 | The text will be inserted at the end of the function_app.py file
38 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Security
4 |
5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet) and [Xamarin](https://github.com/xamarin).
6 |
7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/security.md/definition), please report it to us as described below.
8 |
9 | ## Reporting Security Issues
10 |
11 | **Please do not report security vulnerabilities through public GitHub issues.**
12 |
13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/security.md/msrc/create-report).
14 |
15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/security.md/msrc/pgp).
16 |
17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
18 |
19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
20 |
21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
22 | * Full paths of source file(s) related to the manifestation of the issue
23 | * The location of the affected source code (tag/branch/commit or direct URL)
24 | * Any special configuration required to reproduce the issue
25 | * Step-by-step instructions to reproduce the issue
26 | * Proof-of-concept or exploit code (if possible)
27 | * Impact of the issue, including how an attacker might exploit the issue
28 |
29 | This information will help us triage your report more quickly.
30 |
31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/security.md/msrc/bounty) page for more details about our active programs.
32 |
33 | ## Preferred Languages
34 |
35 | We prefer all communications to be in English.
36 |
37 | ## Policy
38 |
39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/security.md/cvd).
40 |
41 |
42 |
--------------------------------------------------------------------------------
/Templates/Dotnet/UDF/HelloFabric/Deploy.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/fabric-user-data-functions-samples/33a75347b253d5dd6bc9b3083fa0ccd358265ef6/Templates/Dotnet/UDF/HelloFabric/Deploy.zip
--------------------------------------------------------------------------------
/Templates/Dotnet/UDF/HelloFabric/FabricFunctions.cs:
--------------------------------------------------------------------------------
1 | using Microsoft.Azure.Functions.Worker;
2 | using Microsoft.Extensions.Logging;
3 | 
4 | namespace HelloFabric
5 | {
6 |     /// <summary>
7 |     /// Template Fabric user data functions item containing a single sample function.
8 |     /// </summary>
9 |     public class FabricFunctions
10 |     {
11 |         private readonly ILogger _logger;
12 | 
13 |         public FabricFunctions(ILoggerFactory loggerFactory)
14 |         {
15 |             // ILoggerFactory has no parameterless CreateLogger();
16 |             // use the generic extension to supply the category name.
17 |             _logger = loggerFactory.CreateLogger<FabricFunctions>();
18 |         }
19 | 
20 |         /// <summary>
21 |         /// Sample function: greets the caller by name with the current timestamp.
22 |         /// </summary>
23 |         /// <param name="name">Name to include in the greeting.</param>
24 |         /// <returns>A welcome message containing <paramref name="name"/> and the local time.</returns>
25 |         [Function(nameof(HelloFabric))]
26 |         public string HelloFabric(string name)
27 |         {
28 |             _logger.LogInformation("C# Fabric data function is called.");
29 | 
30 |             return $"Welcome to Fabric Functions, {name}, at {DateTime.Now}!";
31 |         }
32 |     }
33 | }
34 | 
--------------------------------------------------------------------------------
/Templates/Dotnet/UDF/HelloFabric/SourceCode.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/fabric-user-data-functions-samples/33a75347b253d5dd6bc9b3083fa0ccd358265ef6/Templates/Dotnet/UDF/HelloFabric/SourceCode.zip
--------------------------------------------------------------------------------
/Templates/Dotnet/UDF/HelloFabric/functions.metadata:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name": "HelloFabric",
4 | "scriptFile": "HelloFabric.dll",
5 | "entryPoint": "HelloFabric.FabricFunctions.HelloFabric",
6 | "language": "dotnet-isolated",
7 | "properties": {
8 | "IsCodeless": false
9 | },
10 | "bindings": [
11 | {
12 | "name": "data",
13 | "direction": "In",
14 | "authLevel": "Anonymous",
15 | "fabricBinding": true,
16 | "type": "httpTrigger",
17 | "methods": [
18 | "post"
19 | ]
20 | },
21 | {
22 | "name": "$return",
23 | "direction": "Out",
24 | "type": "http",
25 | "fabricBinding": true
26 | }
27 | ],
28 | "fabricProperties": {
29 | "fabricFunctionParameters": [
30 | {
31 | "dataType": "String",
32 | "name": "name"
33 | }
34 | ],
35 | "fabricFunctionReturnType": "String"
36 | }
37 | }
38 | ]
--------------------------------------------------------------------------------
/Templates/Python/UDF/HelloFabric/Deploy.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/fabric-user-data-functions-samples/33a75347b253d5dd6bc9b3083fa0ccd358265ef6/Templates/Python/UDF/HelloFabric/Deploy.zip
--------------------------------------------------------------------------------
/Templates/Python/UDF/HelloFabric/SourceCode.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/fabric-user-data-functions-samples/33a75347b253d5dd6bc9b3083fa0ccd358265ef6/Templates/Python/UDF/HelloFabric/SourceCode.zip
--------------------------------------------------------------------------------
/Templates/Python/UDF/HelloFabric/function_app.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import fabric.functions as fn
3 | import logging
4 |
5 | udf = fn.UserDataFunctions()
6 |
7 | @udf.function()
8 | def hello_fabric(name: str) -> str:
9 | logging.info('Python UDF trigger function processed a request.')
10 |
11 | return f"Welcome to Fabric Functions, {name}, at {datetime.datetime.now()}!"
12 |
--------------------------------------------------------------------------------
/Templates/Python/UDF/HelloFabric/functions.metadata:
--------------------------------------------------------------------------------
1 | [{"name": "hello_fabric", "scriptFile": "function_app.py", "bindings": [{"name": "req", "direction": "In", "type": "httpTrigger", "methods": ["post"], "route": "hello_fabric", "authLevel": "Anonymous"}], "fabricProperties": {"fabricFunctionReturnType": "str", "fabricFunctionParameters": [{"name": "name", "dataType": "str"}]}}]
--------------------------------------------------------------------------------
/Templates/Version.xml:
--------------------------------------------------------------------------------
1 |
2 | 2024.12.12.1
--------------------------------------------------------------------------------
/Version.xml:
--------------------------------------------------------------------------------
1 |
2 | 2025.3.18.1
3 |
--------------------------------------------------------------------------------
/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/fabric-user-data-functions-samples/33a75347b253d5dd6bc9b3083fa0ccd358265ef6/image.png
--------------------------------------------------------------------------------