├── SigmaHttpTrigger
│   ├── sample.dat
│   ├── config.ini
│   ├── function.json
│   └── __init__.py
├── .funcignore
├── proxies.json
├── .vscode
│   ├── extensions.json
│   ├── launch.json
│   ├── settings.json
│   └── tasks.json
├── requirements.txt
├── SigmaTimerTrigger
│   ├── function.json
│   └── __init__.py
├── host.json
├── LICENSE
├── .gitignore
└── README.md

/SigmaHttpTrigger/sample.dat:
--------------------------------------------------------------------------------
{
  "name": "Azure"
}
--------------------------------------------------------------------------------
/.funcignore:
--------------------------------------------------------------------------------
.git*
.vscode
local.settings.json
test
.venv
--------------------------------------------------------------------------------
/SigmaHttpTrigger/config.ini:
--------------------------------------------------------------------------------
[sec-database]
database_username = ''
database_password = ''
--------------------------------------------------------------------------------
/proxies.json:
--------------------------------------------------------------------------------
{
  "$schema": "http://json.schemastore.org/proxies",
  "proxies": {}
}
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
{
  "recommendations": [
    "ms-azuretools.vscode-azurefunctions",
    "ms-python.python"
  ]
}
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
# DO NOT include azure-functions-worker in this file
# The Python Worker is managed by Azure Functions platform
# Manually managing azure-functions-worker may cause unexpected issues

azure-functions
pyodbc

# Used by SigmaTimerTrigger/__init__.py
requests
azure-storage-blob
azure-keyvault-secrets
azure-identity
--------------------------------------------------------------------------------
/SigmaTimerTrigger/function.json:
--------------------------------------------------------------------------------
{
  "scriptFile": "__init__.py",
  "bindings": [
    {
      "name": "mytimer",
      "type": "timerTrigger",
      "direction": "in",
      "schedule": "*/20 * * * * *"
    }
  ]
}
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Attach to Python Functions",
      "type": "python",
      "request": "attach",
      "port": 9091,
      "preLaunchTask": "func: host start"
    }
  ]
}
--------------------------------------------------------------------------------
/host.json:
--------------------------------------------------------------------------------
{
  "version": "2.0",
  "logging": {
    "applicationInsights": {
      "samplingSettings": {
        "isEnabled": true,
        "excludedTypes": "Request"
      }
    }
  },
  "extensionBundle": {
    "id": "Microsoft.Azure.Functions.ExtensionBundle",
    "version": "[1.*, 2.0.0)"
  }
}
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
{
  "azureFunctions.deploySubpath": ".",
  "azureFunctions.scmDoBuildDuringDeployment": true,
  "azureFunctions.pythonVenv": ".venv",
  "azureFunctions.projectLanguage": "Python",
  "azureFunctions.projectRuntime": "~3",
  "debug.internalConsoleOptions": "neverOpen",
  "python.pythonPath": ".venv\\Scripts\\python.exe"
}
--------------------------------------------------------------------------------
/SigmaHttpTrigger/function.json:
--------------------------------------------------------------------------------
{
  "scriptFile": "__init__.py",
  "bindings": [
    {
      "authLevel": "anonymous",
      "type": "httpTrigger",
      "direction": "in",
      "name": "req",
      "methods": [
        "get",
        "post"
      ]
    },
    {
      "type": "http",
      "direction": "out",
      "name": "$return"
    }
  ]
}
--------------------------------------------------------------------------------
/.vscode/tasks.json:
--------------------------------------------------------------------------------
{
  "version": "2.0.0",
  "tasks": [
    {
      "type": "func",
      "command": "host start",
      "problemMatcher": "$func-python-watch",
      "isBackground": true,
      "dependsOn": "pipInstall"
    },
    {
      "label": "pipInstall",
      "type": "shell",
      "osx": {
        "command": "${config:azureFunctions.pythonVenv}/bin/python -m pip install -r requirements.txt"
      },
      "windows": {
        "command": "${config:azureFunctions.pythonVenv}\\Scripts\\python -m pip install -r requirements.txt"
      },
      "linux": {
        "command": "${config:azureFunctions.pythonVenv}/bin/python -m pip install -r requirements.txt"
      },
      "problemMatcher": []
    }
  ]
}
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2020 Alex Reed

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don’t work, or not
# install all needed dependencies.
#Pipfile.lock

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# Azure Functions artifacts
bin
obj
appsettings.json
local.settings.json
.python_packages
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Azure Functions

## Table of Contents

- [Overview](#overview)
- [Resources](#resources)
- [Support These Projects](#support-these-projects)
- [How it works](#how-it-works)

## Overview

The `TimerTrigger` makes it easy to run your functions on a schedule. This
sample demonstrates the simple case of calling a function every 5 minutes.
For a `TimerTrigger` to work, you provide a schedule in the form of a
[cron expression](https://en.wikipedia.org/wiki/Cron#CRON_expression)
(see the link for full details). A cron expression is a string made up of
six fields that together represent a schedule. The pattern used to represent
every 5 minutes is `0 */5 * * * *`. In plain text this means: "when seconds
equals 0 and minutes is divisible by 5, for any hour, day of the month,
month, and day of the week".
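
Note that the schedule actually used in this repository differs from the
5-minute example above: `SigmaTimerTrigger/function.json` binds the timer to
run every 20 seconds.

```json
{
  "scriptFile": "__init__.py",
  "bindings": [
    {
      "name": "mytimer",
      "type": "timerTrigger",
      "direction": "in",
      "schedule": "*/20 * * * * *"
    }
  ]
}
```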

## Resources

To use this project you will need to install a driver to connect to the database.
To download it, go to [Microsoft SQL Drivers for Python](https://docs.microsoft.com/en-us/sql/connect/sql-connection-libraries?view=sql-server-ver15#anchor-20-drivers-relational-access)
and run through the installation process.

**Resources - PYODBC with Azure:**

If you would like to read more on the topic of using `pyodbc` in conjunction with Microsoft
Azure, then I would refer you to the [documentation provided by Microsoft](https://docs.microsoft.com/en-us/sql/connect/python/pyodbc/step-3-proof-of-concept-connecting-to-sql-using-pyodbc?view=sql-server-ver15).

**Setup - Requirement Install:**

If you don't plan to reuse this code in other projects, I would recommend simply
installing the dependencies from the `requirements.txt` file:

```console
pip install --requirement requirements.txt
```
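
To confirm that the ODBC driver is visible to Python after installation, you can
list the installed drivers, a minimal check that mirrors the `pyodbc.drivers()`
call already logged in `SigmaHttpTrigger/__init__.py`:

```python
import pyodbc

# Should include "ODBC Driver 17 for SQL Server" once the driver is installed.
print(pyodbc.drivers())
```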

## Support These Projects

**Patreon:**
Help support this project and future projects by donating to my
[Patreon Page](https://www.patreon.com/sigmacoding). I'm always looking to add more
content for individuals like yourself; unfortunately, some of the APIs I use require
me to pay monthly fees.

**YouTube:**
If you'd like to watch more of my content, feel free to visit my YouTube channel
[Sigma Coding](https://www.youtube.com/c/SigmaCoding).

## How it works
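
The repository contains two functions. `SigmaHttpTrigger` connects to an Azure SQL
database with `pyodbc` and returns the first 100 rows of the requested table as JSON,
while `SigmaTimerTrigger` pulls several Microsoft Tech Community RSS feeds on a timer
and writes the parsed articles to Azure Blob Storage. As a rough local-run sketch
(assuming the Azure Functions Core Tools are installed and listening on their default
port of 7071), the HTTP trigger can be called with an optional `table_name` query
parameter:

```console
func start
curl "http://localhost:7071/api/SigmaHttpTrigger?table_name=DT_Idx_XBRL"
```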
--------------------------------------------------------------------------------
/SigmaHttpTrigger/__init__.py:
--------------------------------------------------------------------------------
import time
import json
import pyodbc
import textwrap
import datetime
import logging
import azure.functions as func

from configparser import ConfigParser


def default(o):
    """Converts our Dates and Datetime Objects to Strings."""
    if isinstance(o, (datetime.date, datetime.datetime)):
        return o.isoformat()


def main(req: func.HttpRequest) -> func.HttpResponse:

    logging.info('Python HTTP Database trigger function processed a request.')

    # Initialize the Parser.
    config_parser = ConfigParser()

    # Load the Database Credentials.
    config_parser.read('SigmaHttpTrigger/config.ini')
    database_username = config_parser.get('sec-database', 'database_username')
    database_password = config_parser.get('sec-database', 'database_password')

    logging.info('Loaded Database Credentials.')

    # Grab the table name from the query string, falling back to a default.
    table_name = req.params.get('table_name', 'DT_Idx_XBRL')

    # Grab the Drivers.
    logging.info(pyodbc.drivers())

    # Define the Driver.
    driver = '{ODBC Driver 17 for SQL Server}'

    # Create the connection string. Replace the server placeholder with your
    # own Azure SQL server name.
    connection_string = textwrap.dedent('''
        Driver={driver};
        Server={server},1433;
        Database=sec-filings;
        Uid={username};
        Pwd={password};
        Encrypt=yes;
        TrustServerCertificate=no;
        Connection Timeout=30;
    '''.format(
        driver=driver,
        server='your_server_name_here',
        username=database_username,
        password=database_password
    )).replace("'", "")

    # Create a new connection, retrying once if the server is slow to respond.
    try:
        cnxn: pyodbc.Connection = pyodbc.connect(connection_string)
    except pyodbc.OperationalError:
        time.sleep(2)
        cnxn: pyodbc.Connection = pyodbc.connect(connection_string)

    logging.info(msg='Database Connection Successful.')

    # Create the Cursor Object.
    cursor_object: pyodbc.Cursor = cnxn.cursor()

    # Define the Query.
    select_query = textwrap.dedent("""
        SELECT TOP 100 * FROM [dbo].[{table_name}]
    """.format(table_name=table_name))

    # Execute the Query.
    cursor_object.execute(select_query)

    # Grab the Records.
    records = list(cursor_object.fetchall())

    # Clean them up so we can dump them to JSON.
    records = [tuple(record) for record in records]

    logging.info(msg='Query Successful.')

    if records:

        # Return the Response.
        return func.HttpResponse(
            body=json.dumps(obj=records, indent=4, default=default),
            status_code=200
        )

    # Return an empty list if the table had no rows.
    return func.HttpResponse(
        body=json.dumps(obj=[]),
        status_code=200
    )
--------------------------------------------------------------------------------
/SigmaTimerTrigger/__init__.py:
--------------------------------------------------------------------------------
import json
import requests
import datetime
import logging
import xml.etree.ElementTree as ET

import azure.functions as func

from typing import List
from typing import Dict

from azure.storage.blob import ContainerClient
from azure.keyvault.secrets import SecretClient

from azure.identity import ClientSecretCredential
from azure.identity import DefaultAzureCredential


def parse_rss_feed(content: bytes) -> List[Dict]:
    """Parses the Content from the Azure Article feed.

    ### Parameters
    ----------
    content : bytes
        The HTTP response content returned from
        the request.

    ### Returns
    -------
    list[dict]
        A list of article dictionaries that have been
        parsed.
    """

    # Initialize a list to store articles.
    articles = []

    # Parse it.
    root = ET.fromstring(content)

    # Grab all the articles by finding the "item" tag.
    articles_collection = root.findall("./channel/item")

    # Loop through the articles.
    for article in articles_collection:

        # Prep a dictionary.
        article_dict = {}

        # Loop through each element in the "item" element.
        for elem in article.iter():

            # Set the tag as the key and the text as the value.
            article_dict[elem.tag] = elem.text.strip() if elem.text else ''

        # Add to the master list.
        articles.append(article_dict)

    return articles


def main(mytimer: func.TimerRequest) -> None:

    urls_to_pull = [
        'https://techcommunity.microsoft.com/plugins/custom/microsoft/o365/custom-blog-rss?board=AzureDataFactory&size=1000',
        'https://techcommunity.microsoft.com/plugins/custom/microsoft/o365/custom-blog-rss?board=AzureDataShare&size=1000',
        'https://techcommunity.microsoft.com/plugins/custom/microsoft/o365/custom-blog-rss?board=AzureStorage&size=1000',
        'https://techcommunity.microsoft.com/plugins/custom/microsoft/o365/custom-blog-rss?board=AzureDataExplorer&size=1000',
        'https://techcommunity.microsoft.com/plugins/custom/microsoft/o365/custom-blog-rss?board=AzureDataFactoryBlog&size=1000',
        'https://techcommunity.microsoft.com/plugins/custom/microsoft/o365/custom-blog-rss?board=AzureToolsBlog&size=1000'
    ]

    # Initialize the Credentials.
    default_credential = DefaultAzureCredential()

    # Create a Secret Client, so we can grab our Connection String.
    secret_client = SecretClient(
        vault_url='https://sigma-key-vault.vault.azure.net/',
        credential=default_credential
    )

    # Grab the Blob Connection String, from our Azure Key Vault.
    blob_conn_string = secret_client.get_secret(
        name='blob-storage-connection-string'
    )

    # Connect to the Container.
    container_client = ContainerClient.from_connection_string(
        conn_str=blob_conn_string.value,
        container_name='microsoft-azure-articles'
    )

    articles = []

    # Loop through each URL.
    for url in urls_to_pull:

        # Grab the Response.
        response = requests.get(url=url)

        # If it was okay.
        if response.ok:

            # Then Parse the articles and combine them.
            articles_parsed = parse_rss_feed(content=response.content)

            # Some feeds are empty.
            if articles_parsed:
                articles = articles + articles_parsed

    # Create a dynamic filename.
    filename = "Microsoft RSS Feeds/articles_{ts}.json".format(
        ts=datetime.datetime.now().timestamp()
    )

    # Create a new Blob.
    container_client.upload_blob(
        name=filename,
        data=json.dumps(obj=articles, indent=4),
        blob_type="BlockBlob"
    )

    logging.info('File loaded to Azure Successfully...')

    # Grab the UTC Timestamp.
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc
    ).isoformat()

    # Send message if Past Due.
    if mytimer.past_due:
        logging.info('The timer is past due!')

    # Otherwise let the user know it ran.
    logging.info('Python timer trigger function ran at %s', utc_timestamp)
--------------------------------------------------------------------------------