├── .gitignore ├── LICENSE ├── README.md ├── docs ├── create-function-app-in-azure-portal.md ├── custom-python-version.md ├── install-python-modules.md ├── local-git-deployment_ja.md ├── quickstart-samples-custom-image-with-docker.md └── quickstart-v2-python-functions.md ├── handson ├── README.md ├── module0 │ ├── README_ja.md │ └── http-trigger-helloworld │ │ ├── function.json │ │ └── run.py ├── module1 │ ├── README_ja.md │ └── http-trigger-feed-to-queue │ │ ├── function.json │ │ ├── run.py │ │ └── send-https-requests.sh └── module2 │ ├── README_ja.md │ └── queue-trigger-rss-crawl-out-cosmosdb │ ├── README.md │ ├── function.json │ └── run.py ├── img ├── azure-function-x-python.png ├── cognitive-computer-vision-sample.png ├── custom-python-version-1.png ├── custom-python-version-2.png ├── custom-python-version-3.png ├── custom-python-version-4.png ├── custom-python-version-5.png ├── handson-mod1-appsetting.png ├── handson-mod1-integration.png ├── handson-mod2-appsetting.png ├── handson-mod2-integration-doc.png ├── handson-mod2-integration-queue.png └── handson-mod2-test-cosmosdb.png ├── scripts ├── create-azfunc-v2-linux-app-service-plan.sh ├── create-azfunc-v2-linux-consumption-plan.sh ├── create-cognitive-computer-vision.sh ├── create-cosmosdb-test-db-coll.sh ├── create-resource-group.sh ├── create-storage-account.sh ├── docker-build.sh ├── docker-run-mini.sh ├── docker-run.sh ├── get-blob-sas-token.sh ├── local-dev-helpers │ ├── blob-container-create │ ├── blob-delete │ ├── blob-list │ ├── blob-upload │ ├── env.conf │ ├── queue-create │ ├── queue-delete │ ├── queue-list │ ├── queue-message-get │ ├── queue-message-put │ └── start-azurite ├── send-test-blob-sas-token.sh ├── setup-az-for-azfunc-linux-preview.sh ├── update-azfunc-app-settings.sh └── upload-blob-sas-token.py ├── v1functions ├── blob-sas-token-generator │ ├── README.md │ ├── function │ │ ├── function.json │ │ └── run.py │ ├── send-https-request.sh │ └── t.sh ├── blob-trigger-blob-in-out-bindings │ ├── 
README.md │ ├── clear.txt │ ├── clear.zip │ ├── function │ │ ├── function-sample.json │ │ ├── function.json │ │ └── run.py │ └── function_zip │ │ ├── function.json │ │ └── run.py ├── cosmosdb-trigger-cosmosdb-in-binding │ ├── README.md │ └── function │ │ ├── function.json │ │ └── run.py ├── eventhub-trigger-table-out-bindings │ ├── README.md │ ├── function │ │ ├── function.json │ │ └── run.py │ └── send-event.py ├── http-trigger-dump-request │ ├── README.md │ ├── function │ │ ├── function.json │ │ └── run.py │ └── send-https-requests.sh ├── proxies-simple-condition-matches │ ├── README.md │ ├── function-http-trigger-content │ │ ├── function.json │ │ └── run.py │ ├── img │ │ ├── dynamic-page-result.png │ │ └── static-page-result.png │ ├── proxies.json │ └── static │ │ ├── lasvegas.html │ │ └── lasvegas.png ├── queue-trigger-blob-in-binding │ ├── README.md │ └── function │ │ ├── function.json │ │ └── run.py ├── queue-trigger-cosmosdb-in-binding │ ├── function.json │ └── run.py ├── queue-trigger-rssfeed-crawler │ ├── README.md │ ├── function │ │ ├── function.json │ │ └── run.py │ └── setup-site-packages.sh ├── queue-trigger-sendgrid │ ├── README.md │ └── function │ │ ├── function.json │ │ ├── function.json.example │ │ └── run.py ├── queue-trigger-tagging-images │ ├── README.md │ └── function │ │ ├── function.json │ │ └── run.py └── timer-trigger-azuresearch-index-monitoring │ ├── README.md │ └── function │ ├── function.json │ └── run.py └── v2functions ├── Dockerfile ├── VERSION ├── blob-trigger-cosmosdb-out-binding ├── __init__.py ├── function.json └── readme.md ├── blob-trigger-watermark-blob-out-binding ├── __init__.py ├── function.json ├── readme.md ├── sample.jpg └── watermark.png ├── cosmos-trigger-cosmodb-output-binding ├── __init__.py ├── function.json └── readme.md ├── host.json ├── http-trigger-blob-sas-token ├── __init__.py ├── function.json └── readme.md ├── http-trigger-dump-request ├── __init__.py ├── function.json └── readme.md ├── 
http-trigger-onnx-model ├── __init__.py ├── example.png ├── function.json ├── rain_princess.onnx └── readme.md ├── local.settings.json ├── local.settings.json.sample ├── queue-trigger-blob-in-out-binding ├── __init__.py ├── function.json └── readme.md ├── requirements.txt ├── sbqueue-trigger-sbqueue-out-binding ├── __init__.py ├── function.json └── readme.md └── timer-trigger-cosmosdb-output-binding ├── __init__.py ├── function.json └── readme.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Visual Studio Code 2 | .vscode 3 | 4 | # Global 5 | .DS_Store 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | env/ 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | .hypothesis/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | 62 | # Flask stuff: 63 | instance/ 64 | .webassets-cache 65 | 66 | # Scrapy stuff: 67 | .scrapy 68 | 69 | # Sphinx documentation 70 | docs/_build/ 71 | 72 | # PyBuilder 73 | target/ 74 | 75 | # IPython Notebook 76 | .ipynb_checkpoints 77 | 78 | # pyenv 79 | .python-version 80 | 81 | # celery beat schedule file 82 | celerybeat-schedule 83 | 84 | # dotenv 85 | .env 86 | 87 | # virtualenv 88 | venv/ 89 | ENV/ 90 | 91 | # Spyder project settings 92 | .spyderproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # Personal test dir 98 | /t/* 99 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Yoichi Kawasaki 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # azure-functions-python-samples 2 | Azure Functions Python Sample Codes 3 | 4 | ![](https://github.com/yokawasa/azure-functions-python-samples/raw/master/img/azure-function-x-python.png) 5 | 6 | Table of Contents 7 | - [azure-functions-python-samples](#azure-functions-python-samples) 8 | - [Python functions on Azure Functions 2.X (Public Preview)](#python-functions-on-azure-functions-2x-public-preview) 9 | - [Samples](#samples) 10 | - [Documents](#documents) 11 | - [Python functions on Azure Functions 1.X (Experimental)](#python-functions-on-azure-functions-1x-experimental) 12 | - [Contributing](#contributing) 13 | 14 | 15 | ## Python functions on Azure Functions 2.X (Public Preview) 16 | 17 | This is a collection of Python function samples on Azure Functions 2.X. For a comprehensive development and debugging experience, use the [Azure Functions Core Tools](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-python) or [VS Code extension](https://code.visualstudio.com/tutorials/functions-extension/getting-started). 
18 | 19 | ### Samples 20 | | Sample | Description | Trigger | In Bindings | Out Bindings 21 | | ------------- | ------------- | ------------- | ----------- | ----------- | 22 | | [blob-trigger-cosmosdb-out-binding](v2functions/blob-trigger-cosmosdb-out-binding) | Azure Functions Blob Storage Trigger Python Sample. The function gets image data from Azure Blob Trigger, gets tags for the image with [Computer Vision API](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/) ([Azure Cognitive Services](https://azure.microsoft.com/en-us/services/cognitive-services/)), and stores the tags into Azure Cosmos DB by leveraging Cosmos DB output binding | Blob Storage | NONE | CosmosDB | 23 | | [cosmos-trigger-cosmodb-output-binding](v2functions/cosmos-trigger-cosmodb-output-binding) | Azure Functions Cosmos DB Trigger Python Sample. The function gets document data from Azure Cosmos DB Trigger, ROT13 encodes obtained clear text, and stores encoded data into Azure Cosmos DB by using Cosmos DB output binding | CosmosDB | NONE | CosmosDB | 24 | | [queue-trigger-blob-in-out-binding](v2functions/queue-trigger-blob-in-out-binding) | Azure Functions Queue Trigger Python Sample. The function gets a file name from queue message, reads a blob file named the file name using Blob Input Binding, then ROT13 encodes the obtained clear text, and finally stores it into Azure Blob Storage using Blob Output Binding | Queue Storage | Blob Storage | Blob Storage | 25 | | [timer-trigger-cosmosdb-output-binding](v2functions/timer-trigger-cosmosdb-output-binding) | Azure Functions Timer Trigger Python Sample. 
The function gets blog RSS feed and store the results into CosmosDB using Cosmos DB output binding | Timer | NONE | CosmosDB | 26 | | [http-trigger-blob-sas-token](v2functions/http-trigger-blob-sas-token) | Azure Function HTTP Trigger Python Sample that returns a SAS token for Azure Storage for the specified container and blob name | HTTP | NONE | HTTP | 27 | | [http-trigger-dump-request](v2functions/http-trigger-dump-request) | Azure Function HTTP Trigger Python Sample that returns request dump info with JSON format | HTTP | NONE | HTTP | 28 | | [http-trigger-onnx-model](v2functions/http-trigger-onnx-model) | This function demonstrates running an inference using an ONNX model. It is triggered by an HTTP request. | HTTP | NONE | HTTP | 29 | | [blob-trigger-watermark-blob-out-binding](v2functions/blob-trigger-watermark-blob-out-binding) | Azure Function Python Sample that watermarks an image. This function triggers on an input blob (image) and adds a watermark by calling into the Pillow library. The resulting composite image is then written back to blob storage using a blob output binding. | Blob Storage | Blob Storage | Blob Storage | 30 | | [sbqueue-trigger-sbqueue-out-binding](v2functions/sbqueue-trigger-sbqueue-out-binding) | Azure Functions Service Bus Queue Trigger Python Sample. The function demonstrates reading from a Service Bus queue and placing a message into an output Service Bus queue. 
| Service Bus Queue | None | Service Bus Queue | 31 | 32 | ### Documents 33 | * [Quickstart V2 Python Functions with Azure Functions Core Tools](docs/quickstart-v2-python-functions.md) 34 | * [Quickstart Function Samples as a Custom image with Docker](docs/quickstart-samples-custom-image-with-docker.md) 35 | * [Azure Functions Python developer guide](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python) 36 | * [Zip push deployment for Azure Functions](https://docs.microsoft.com/en-us/azure/azure-functions/deployment-zip-push) 37 | * [Work with Azure Functions Proxies](https://docs.microsoft.com/en-us/azure/azure-functions/functions-proxies) 38 | * [Create a function triggered by Azure Blob storage](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-storage-blob-triggered-function) 39 | * [Create a function triggered by Azure Cosmos DB](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-cosmos-db-triggered-function) 40 | 41 | --- 42 | ## Python functions on Azure Functions 1.X (Experimental) 43 | 44 | **IMPORTANT** 45 | - **By default, function apps created in the Azure portal are set to version 2.x. When possible, you should use this runtime version, where new feature investments are being made. Please see [this](https://docs.microsoft.com/en-us/azure/azure-functions/functions-versions) for more detail on Azure Function runtime versions and supported languages.** 46 | - **Please consider using 2.X Python function as Python function in Azure function 1.X is experimental and new feature investments won't be added to 1.X Python function.** 47 | 48 | | Sample | Description | Trigger | In Bindings | Out Bindings 49 | | ------------- | ------------- | ------------- | ----------- | ----------- | 50 | | [cosmosdb-trigger-cosmosdb-in-binding](v1functions/cosmosdb-trigger-cosmosdb-in-binding) | Azure Functions CosmosDB Trigger Python Sample. 
The function simply reads & dumps documents which are added to or changed in Azure Cosmos DB by leveraging CosmosDB input binding | CosmosDB | CosmosDB | NONE | 51 | | [blob-trigger-blob-in-out-bindings](v1functions/blob-trigger-blob-in-out-bindings) | Azure Functions Blob Trigger Python Sample that simply reads a file from Azure Blob Storage and writes an output file to Azure Blob Storage using Blob Storage input and output bindings respectively | Blob Storage | Blob Storage | Blob Storage | 52 | | [queue-trigger-blob-in-bindings](v1functions/queue-trigger-blob-in-binding) | Azure Functions Queue Trigger Python Sample that obtains a blob file name from Queue as a queue message and reads a file named the blob file name in Azure Blob Storage using Blob Input Binding| Queue Storage | Blob Storage| NONE | 53 | | [queue-trigger-rssfeed-crawler](v1functions/queue-trigger-rssfeed-crawler) | Azure Functions Queue Trigger Python Sample that gets an RSS feed URL from Queue and dumps all items obtained from the RSS feed| Queue Storage| NONE | NONE | 54 | | [queue-trigger-tagging-images](v1functions/queue-trigger-tagging-images) | Azure Functions Queue Trigger Python Sample that tags images stored on Azure Blob Storage by using Cognitive Vision API | Queue Storage| NONE | NONE | 55 | | [queue-trigger-sendgrid](v1functions/queue-trigger-sendgrid) | Azure Functions Queue Trigger Python Sample that sends email by using SendGrid bindings | Queue Storage| NONE | SendGrid | 56 | | [queue-trigger-cosmosdb-in-binding](v1functions/queue-trigger-cosmosdb-in-binding) | Azure Functions Queue Trigger that obtains a document ID from Queue as a queue message, selects a document object from Cosmos DB by using the document ID, and finally dumps the object | Queue Storage| CosmosDB | NONE | 57 | | [http-trigger-dump-request](v1functions/http-trigger-dump-request) | Azure Functions HTTP Trigger Python Sample that gets and dumps HTTPS request info that the trigger receives | HTTP | NONE | HTTP | 58 | | 
[blob-sas-token-generator](v1functions/blob-sas-token-generator) | Azure Function HTTP Trigger Python Sample that returns a SAS token for Azure Storage for the specified container and blob name | HTTP | NONE | HTTP | 59 | | [timer-trigger-azuresearch-index-monitoring](v1functions/timer-trigger-azuresearch-index-monitoring) | Azure Functions Timer Trigger Python Sample that get Azure Search index statistics via API and store the results into CosmosDB | Timer | NONE | CosmosDB | 60 | | [eventhub-trigger-table-out-bindings](v1functions/eventhub-trigger-table-out-bindings) | Azure Functions EventHub Trigger Python Sample that read message (device info) in EventHub that sent from sender and write an output record to Azure Table Storage using Table bindings | EventHub | NONE | Table Storage| 61 | | [proxies-simple-condition-matches](v1functions/proxies-simple-condition-matches) | Azure Functions Python Sample that re-write dynamic and static page url using Azure Functions Proxies | HTTP | NONE | HTTP | 62 | 63 | ## Contributing 64 | Bug reports and pull requests are welcome on GitHub at https://github.com/yokawasa/azure-functions-python-samples 65 | -------------------------------------------------------------------------------- /docs/create-function-app-in-azure-portal.md: -------------------------------------------------------------------------------- 1 | # Create a 1.X Python function in the Azure portal 2 | 3 | **IMPORTANT - You're no longer able to create 1.X Python function in the Azure Portal** 4 | 5 | > NOTE 6 | > - By default, function apps created in the Azure portal are set to version 2.x. When possible, you should use this runtime version, where new feature investments are being made. 
Please see [this](https://docs.microsoft.com/en-us/azure/azure-functions/functions-versions) for more detail on Azure Function runtime versions and supported languages 7 | > - Please consider using 2.X Python function as Python function in Azure function 1.X is experimental and new feature investments won't be added to 1.X Python function. -------------------------------------------------------------------------------- /docs/custom-python-version.md: -------------------------------------------------------------------------------- 1 | # [Deprecated] How to change the Python version used in a Function App (1.X Function runtime) 2 | 3 | **IMPORTANT - Please consider using 2.X Python function as Python function in Azure function 1.X is experimental and new feature investments won't be added to 1.X Python function** 4 | 5 | ## 1. Create Function App (if you don't have the one yet) 6 | 7 | **IMPORTANT - You're no longer able to create 1.X Python function in the Azure Portal** 8 | 9 | *[note] Please make sure to choose **App Service Plan** for your Function App's Hosting plan. 10 | From my experience, Custom Python runtime doesn't always work under Consumption plan (Need to figure out the reason). For the difference between App Service Plan and Consumption plan, please refer to [Azure Functions hosting plans comparison](https://github.com/MicrosoftDocs/azure-docs/blob/master/articles/azure-functions/functions-scale.md)* 11 | 12 | ## 2. Install Python 3.X x64 Site Extension in Kudu UI 13 | 14 | In the Platform features page, click **Advanced tools (Kudu)** to go to Kudu UI. Or you can go to Kudu UI with the URL like **https://(your-function-app-name).scm.azurewebsites.net/** 15 | ![](../img/custom-python-version-1.png) 16 | In Kudu UI, click **Site extensions** to navigate you to Site Extensions page. 
17 | ![](../img/custom-python-version-2.png) 18 | In Site Extensions page, select Gallery menu and typein **Python** as keyword for search box to get available Python runtimes modules, and install a 64 bit version of Python 3.5.4, Python 3.6.1, or whichever Python 3.X module available. In this case, Python 3.6.1 was chosen and installed in D:\home\python361x64 19 | ![](../img/custom-python-version-3.png) 20 | 21 | ## 3. In App Settings, add Handler Mappings entry so as to use Python3.X via FastCGI 22 | 23 | In App Settings page, scroll down to "Handler Mappings" section, and Add new handler mapping like this: 24 | ![](../img/custom-python-version-4.png) 25 | Suppose you installed Python 3.6.1 x64: 26 | 27 | | Key | Value 28 | | ------------- | ------------- | 29 | | Extension | fastCgi | 30 | | ScriptProcessor | D:\home\python361x64\python.exe | 31 | | Arguments | D:\home\python361x64\wfastcgi.py | 32 | 33 | Suppose you installed Python 3.5.4 x64: 34 | 35 | | Key | Value 36 | | ------------- | ------------- | 37 | | Extension | fastCgi | 38 | | ScriptProcessor | D:\home\python354x64\python.exe | 39 | | Arguments | D:\home\python354x64\wfastcgi.py | 40 | 41 | ## 4. 
Test the python version being used in Function App 42 | Add a new function and add sample code like the following to see which Python version is being used in the Function App: 43 | ``` 44 | import os 45 | import json 46 | import platform 47 | postreqdata = json.loads(open(os.environ['req']).read()) 48 | response = open(os.environ['res'], 'w') 49 | response.write("Python version: {0}".format(platform.python_version())) 50 | response.close() 51 | ``` 52 | ![](../img/custom-python-version-5.png) 53 | -------------------------------------------------------------------------------- /docs/install-python-modules.md: -------------------------------------------------------------------------------- 1 | # [Deprecated] How to install Python modules (1.X Function runtime) 2 | 3 | **IMPORTANT - Please consider to use 2.X Python funciton as Python function in Azure function 1.X is experimental and new feature investments won't be added to 1.X Python function** 4 | 5 | Basically there are 2 ways for you to install python modules 6 | * 1. Installing python module using pip 7 | * 2. Uploading python module files via Kudu UI/Console 8 | 9 | 10 | ## 1. Installing python module using pip 11 | 12 | Here is how you install python module using pip in Kudu DebugConsole. 13 | 14 | Assuming you want to install feedparser module, 15 | 16 | 17 | 1-1. Open Kudu Debug Console: https://APPNAME.scm.azurewebsites.net/DebugConsole 18 | 19 | 1-2. (Optional) Check current installed module list 20 | ``` 21 | D:\home> python -m pip list 22 | ``` 23 | You will see the following output 24 | ``` 25 | pip (1.5.6) 26 | setuptools (6.0.2) 27 | virtualenv (1.11.6) 28 | ``` 29 | 1-3. CD to your function's base directory 30 | ``` 31 | D:\home> cd site\wwwroot\ 32 | ``` 33 | 1-4. 
Create virtual env on your function's directory and activate it 34 | ``` 35 | D:\home\site\wwwroot\ python -m virtualenv myenv 36 | D:\home\site\wwwroot\ cd myenv\Scripts 37 | D:\home\site\wwwroot\\myenv\Scripts> activate.bat 38 | (myenv) D:\home\site\wwwroot\\myenv\Scripts> 39 | ``` 40 | 1-5. (Optional) Update the pip module to latest one 41 | ``` 42 | (myenv) D:\home\site\wwwroot\\myenv\Scripts>python -m pip install -U pip 43 | ``` 44 | You will see the following output 45 | ``` 46 | Downloading/unpacking pip from https://pypi.python.org/packages/b6/ac/7015eb97dc749283ffdec1c3a88ddb8ae03b8fad0f0e611408f196358da3/pip-9.0.1-py2.py3-none-any.whl#md5=297dbd16ef53bcef0447d245815f5144 47 | Installing collected packages: pip 48 | Found existing installation: pip 1.5.6 49 | Uninstalling pip: 50 | Successfully uninstalled pip 51 | Successfully installed pip 52 | Cleaning up... 53 | ``` 54 | 1-6. Install the module you want. Here you install feedparser 55 | ``` 56 | (myenv) D:\home\site\wwwroot\\myenv\Scripts>python -m pip install feedparser 57 | ``` 58 | 59 | Once you install the module, you can leverage it in your function code. Don't forget to include the package path for the module in the PATH like this: 60 | 61 | ``` 62 | # -*- coding: utf-8 -*- 63 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname( __file__ ), 'myenv/Lib/site-packages'))) 64 | import feedparser 65 | ``` 66 | 67 | That's it! 68 | 69 | Please also refer to 'Azure App Service Kudu console' secion of the page: [Managing Python on Azure App Service](https://docs.microsoft.com/en-us/visualstudio/python/managing-python-on-azure-app-service) 70 | 71 | ## 2. Uploading python module files via Kudu UI/DebugConsole 72 | 73 | This is very straightfoward approach - you simply download the module and upload it using Kudo Debug Console. Just upload files and folder using drag and drop onto your function's base directory. 
74 | https://github.com/projectkudu/kudu/wiki/Kudu-console 75 | 76 | Once you upload the module files, then include the package path for the module in the PATH like (1) above. 77 | 78 | This seems very easy but you have to prepare the right packages that is compatible with your function's platform - this is the tougheast point in this procedure. 79 | 80 | -------------------------------------------------------------------------------- /docs/local-git-deployment_ja.md: -------------------------------------------------------------------------------- 1 | # [Deprecated] Local Git Deployment to Azure Functions (1.X Function runtime) 2 | 3 | **IMPORTANT - Please consider to use 2.X Python funciton as Python function in Azure function 1.X is experimental and new feature investments won't be added to 1.X Python function** 4 | 5 | ## 1. Step 1: ローカルレポジトリの作成 6 | $ git init 7 | ``` 8 | $ cd LOCAL_GIT_REPO_DIR 9 | $ git init 10 | Initialized empty Git repository in /LOCAL_GIT_REPO_DIR/.git/ 11 | ``` 12 | 13 | ## 2: ローカルレポジトリにソースコードをコミット 14 | "mypyfunc"という名前のfunctionを作成する。ローカルレポジトリルート配下に"mypyfunc"ディレクトリを作成して、その配下にrun.py、 function.json、その他関連ファイルを配置して全てのファイルをコミットする 15 | 16 | ``` 17 | $ cd LOCAL_GIT_REPO_DIR 18 | $ find mypyfunc 19 | $ mypyfunc 20 | $ mypyfunc/function.json 21 | $ mypyfunc/run.py 22 | $ git add mypyfunc 23 | $ git commit -m "Added mypyfunc" 24 | ``` 25 | 26 | ## 3: App Service アプリのリポジトリを有効にする 27 | [手順 3: App Service アプリのリポジトリを有効にする](https://docs.microsoft.com/ja-jp/azure/app-service/app-service-deploy-local-git#span-data-ttu-idd68c5-131a-namestep3a手順-3-app-service-アプリのリポジトリを有効にするspanspan-classsxs-lookupspan-data-stu-idd68c5-131a-namestep3astep-3-enable-the-app-service-app-repositoryspanspan)を参考にFunction Appに対して Git リポジトリを有効にする 28 | 29 | 30 | ## 4: プロジェクトをデプロイする 31 | 32 | ポータルのFunction Appの[設定]、[プロパティ] の順にクリックし、[Git URL (Git の URL)] を確認 33 | ``` 34 | Git URL例: https://yoichika@yoichikademo27.scm.azurewebsites.net:443/yoichikademo27.git 35 | ``` 36 | 37 | Function 
AppのGitレポジトリに対して'azure'という名前の参照を作成 38 | 39 | ``` 40 | $ cd LOCAL_GIT_REPO_DIR 41 | $ git remote add azure https://yoichika@yoichikademo27.scm.azurewebsites.net:443/yoichikademo27.git 42 | ``` 43 | 44 | ローカルのコンテンツをFunction Appにプッシュ. この時、初回であれば3のステップで設定・取得したデプロイ資格情報の入力を求められる 45 | ``` 46 | git push azure master 47 | Password for 'https://yoichika@yoichikademo27.scm.azurewebsites.net:443': 48 | ``` 49 | 50 | 以下実行例 51 | ``` 52 | $ git push azure master 53 | Counting objects: 4, done. 54 | Delta compression using up to 4 threads. 55 | Compressing objects: 100% (3/3), done. 56 | Writing objects: 100% (4/4), 389 bytes | 389.00 KiB/s, done. 57 | Total 4 (delta 1), reused 0 (delta 0) 58 | remote: Updating branch 'master'. 59 | remote: Updating submodules. 60 | remote: Preparing deployment for commit id 'c0f99c5a43'. 61 | remote: Generating deployment script. 62 | remote: Running deployment command... 63 | remote: Handling function App deployment. 64 | remote: Not using funcpack because SCM_USE_FUNCPACK is not set to 1 65 | remote: Installing function extensions from nuget 66 | remote: KuduSync.NET from: 'D:\home\site\repository' to: 'D:\home\site\wwwroot' 67 | remote: Copying file: 'mypyfunc\function.json' 68 | remote: Copying file: 'mypyfunc\run.py' 69 | remote: Restoring npm packages in "D:\home\site\wwwroot" 70 | remote: Finished successfully. 71 | remote: Running post deployment command(s)... 72 | remote: Syncing 2 function triggers with payload size 228 bytes successful. 73 | remote: Deployment successful. 
74 | To https://yoichikademo27.scm.azurewebsites.net:443/yoichikademo27.git 75 | 902abf2..c0f99c5 master -> master 76 | ``` 77 | 注意: 一旦これでリリースするとPortalでは編集できなくなります 78 | 79 | 80 | # LINKS 81 | * https://docs.microsoft.com/ja-jp/azure/app-service/app-service-deploy-local-git 82 | -------------------------------------------------------------------------------- /docs/quickstart-samples-custom-image-with-docker.md: -------------------------------------------------------------------------------- 1 | # Quickstart Function Samples as a Custom image with Docker 2 | 3 | This is a quickstart on how you start running Python function samples as a custom image (Container) with Docker. 4 | 5 | 6 | - [Quickstart Function Samples as a Custom image with Docker](#quickstart-function-samples-as-a-custom-image-with-docker) 7 | - [Prerequisites](#prerequisites) 8 | - [Git clone source code](#git-clone-source-code) 9 | - [Create Azure Resources that required to run the samples](#create-azure-resources-that-required-to-run-the-samples) 10 | - [[Required] Azure Storage Account](#required-azure-storage-account) 11 | - [[Optional] CosmosDB and Computer Vision API](#optional-cosmosdb-and-computer-vision-api) 12 | - [Build Container Image](#build-container-image) 13 | - [Run the image locally](#run-the-image-locally) 14 | - [1. Run the image with minimum configuration](#1-run-the-image-with-minimum-configuration) 15 | - [2. 
Run the image with full configuration](#2-run-the-image-with-full-configuration) 16 | - [Test access to the functions](#test-access-to-the-functions) 17 | - [Tips](#tips) 18 | - [Console Logging Option](#console-logging-option) 19 | - [LINKS](#links) 20 | 21 | 22 | ## Prerequisites 23 | - [Docker](https://docs.docker.com/) 24 | 25 | ## Git clone source code 26 | ```bash 27 | git clone https://github.com/yokawasa/azure-functions-python-samples.git 28 | ``` 29 | 30 | ## Create Azure Resources that required to run the samples 31 | 32 | This project include a set of multiple sample function and each function may have different required resources. Please check `readme.md` included in each function sample (Check [v2functions](../v2functions)). 33 | 34 | ### [Required] Azure Storage Account 35 | A minimum is an `Azure Storage Account` which is necessary for all functions. Here is how you create: 36 | 37 | > [scripts/create-resource-group.sh](../scripts/create-resource-group.sh) 38 | ```bash 39 | RESOURCE_GROUP="" 40 | REGION="" 41 | az group create --name $RESOURCE_GROUP --location $REGION 42 | ``` 43 | > [scripts/create-storage-account.sh](../scripts/create-storage-account.sh) 44 | ```bash 45 | RESOURCE_GROUP="" 46 | REGION="" 47 | STORAGE_ACCOUNT="" 48 | 49 | echo "Create an Azure Storage account: $STORAGE_ACCOUNT" 50 | az storage account create --name $STORAGE_ACCOUNT \ 51 | --location $REGION \ 52 | --resource-group $RESOURCE_GROUP \ 53 | --sku Standard_LRS 54 | ``` 55 | 56 | ### [Optional] CosmosDB and Computer Vision API 57 | The rest of resources such as Cosmos DB account and Computer Vision Subscription are optionals: 58 | 59 | For CosmosDB Account and its database and collections, you can leverage the following helper script. Adding required params in the script and running will create a CosmosDB Account and database and collections. 
60 | > [scripts/create-cosmosdb-test-db-coll.sh](../scripts/create-cosmosdb-test-db-coll.sh) 61 | 62 | For Computer Vision API subscription, you can leverage the following helper script. Likewise, add required params in the script and run it. 63 | > [scripts/create-cognitive-computer-vision.sh](../scripts/create-cognitive-computer-vision.sh) 64 | 65 | ## Build Container Image 66 | 67 | Let's build the image from the Docker file using `docker build` command. 68 | 69 | ```bash 70 | cd v2functions 71 | 72 | # Build the image with `docker build` command 73 | # docker build --tag /: . 74 | docker build --tag yoichikawasaki/azfuncpythonsamples:v0.0.1 . 75 | ``` 76 | You can also use a helper script - [scripts/docker-build.sh](../scripts/docker-build.sh) 77 | 78 | ## Run the image locally 79 | 80 | Now you're ready to run the app. You have 2 options 81 | 82 | ### 1. Run the image with minimum configuration 83 | 84 | > [scripts/docker-run-mini.sh](../scripts/docker-run-mini.sh) 85 | ```bash 86 | ... 87 | docker run -p 8080:80 -it \ 88 | -e AzureWebJobsStorage="$STORAGE_CONNECTION_STRING" \ 89 | $DOCKER_ID/$CONTAINER_IMAGE_NAME:$TAG 90 | ... 91 | ``` 92 | 93 | ### 2. Run the image with full configuration 94 | 95 | > [scripts/docker-run.sh](../scripts/docker-run.sh) 96 | ```bash 97 | ... 98 | docker run -p 8080:80 -it \ 99 | -e AzureWebJobsStorage="$STORAGE_CONNECTION_STRING" \ 100 | -e MyStorageConnectionString="$STORAGE_CONNECTION_STRING" \ 101 | -e MyCosmosDBConnectionString="$COSMOSDB_CONNECTION_STRING" \ 102 | -e ComputerVisionSubscription="$COMPUTER_VSION_API_SUBSCRIPTION" \ 103 | -e ComputerVisionApiEndpoint="$COMPUTER_VSION_API_ENDPOINT" \ 104 | $DOCKER_ID/$CONTAINER_IMAGE_NAME:$TAG 105 | ... 
106 | ``` 107 | 108 | ## Test access to the functions 109 | 110 | Once you start the app with docker, let's send a test request to `http-trigger-dump-request` function: 111 | 112 | ```bash 113 | curl -s http://localhost:8080/api/http-trigger-dump-request |jq 114 | 115 | { 116 | "method": "GET", 117 | "url": "http://localhost:8080/api/http-trigger-dump-request", 118 | "headers": { 119 | "accept": "*/*", 120 | "host": "localhost:8080", 121 | "user-agent": "curl/7.54.0" 122 | }, 123 | "params": {}, 124 | "get_body": "" 125 | } 126 | ``` 127 | 128 | ## Tips 129 | ### Console Logging Option 130 | By default, Console Logging is not enabled, and you can enable it by setting the following option as an ENV variable in Dockerfile or giving the option in running docker: 131 | ``` 132 | ENV AzureFunctionsJobHost__Logging__Console__IsEnabled=true 133 | ``` 134 | 135 | ## LINKS 136 | - [Create a function on Linux using a custom image](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-function-linux-custom-image) -------------------------------------------------------------------------------- /docs/quickstart-v2-python-functions.md: -------------------------------------------------------------------------------- 1 | # Quickstart V2 Python Functions with Azure Functions Core Tools 2 | 3 | This is a quickstart on how you create and deploy a Python function on Azure Functions 2.X using the [Azure Functions Core Tools](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-python). 
4 | 5 | 6 | 7 | - [Quickstart V2 Python Functions with Azure Functions Core Tools](#quickstart-v2-python-functions-with-azure-functions-core-tools) 8 | - [Prerequisites](#prerequisites) 9 | - [Create a Python functions project](#create-a-python-functions-project) 10 | - [Create Python functions from templates](#create-python-functions-from-templates) 11 | - [Create and activate a virtual environment](#create-and-activate-a-virtual-environment) 12 | - [Manage package with requirements.txt](#manage-package-with-requirementstxt) 13 | - [function.json](#functionjson) 14 | - [Update the host.json file to use extension bundles,](#update-the-hostjson-file-to-use-extension-bundles) 15 | - [Run the function locally](#run-the-function-locally) 16 | - [Publishing to Azure](#publishing-to-azure) 17 | - [LINKS](#links) 18 | 19 | 20 | ## Prerequisites 21 | - Install `Python 3.6` 22 | - Install [Azure Core Tools version 2.x](https://docs.microsoft.com/en-us/azure/azure-functions/functions-run-local#v2) (the latest one) 23 | 24 | ```bash 25 | # Install the latest Azure Core Tools version 2.x 26 | npm install -g azure-functions-core-tools 27 | 28 | # If it's on macOS, you can install with homebrew 29 | brew tap azure/functions 30 | brew install azure-functions-core-tools 31 | ``` 32 | 33 | ## Create a Python functions project 34 | ```sh 35 | $ func init v2functions --worker-runtime python 36 | ``` 37 | 38 | ### Create Python functions from templates 39 | List all python functions 40 | ```sh 41 | $ func templates list 42 | 43 | Python Templates: 44 | Azure Blob Storage trigger 45 | Azure Cosmos DB trigger 46 | Azure Event Grid trigger 47 | Azure Event Hub trigger 48 | HTTP trigger 49 | Azure Queue Storage trigger 50 | Azure Service Bus Queue trigger 51 | Azure Service Bus Topic trigger 52 | Timer trigger 53 | ... 
54 | ``` 55 | 56 | Then, create a python function from templates 57 | ```sh 58 | # First of all, move to python functions project top 59 | $ cd v2functions 60 | 61 | # Http Trigger functions 62 | $ func new --language python --template "HttpTrigger" --name HttpTriggerPY 63 | 64 | # Blob Trigger functions 65 | $ func new --language python --template "Azure Blob Storage trigger" --name BlobTriggerPY 66 | 67 | # Cosmos DB Trigger functions 68 | $ func new --language python --template "Azure Cosmos DB trigger" --name CosmosdbTriggerPY 69 | ``` 70 | 71 | ### Create and activate a virtual environment 72 | 73 | Create a virtual environment directory at the top of function directory 74 | ```sh 75 | cd functions 76 | python3.6 -m venv .env 77 | source .env/bin/activate 78 | ``` 79 | 80 | After activating your virtual environment for the function development, install packages you use in your functions (For example, `numpy`) 81 | ```sh 82 | pip install --upgrade pip 83 | pip install numpy 84 | ... 85 | ``` 86 | 87 | ## Manage package with requirements.txt 88 | 89 | When you develop locally using the Azure Functions Core Tools or VS Code, I guess you simply install your required python packages using `pip`. It's OK in developing locally but when it comes to the production deployment, please make sure that all your dependencies are listed in the `requirements.txt`, located at the root of your project directory. 
For example, here is a requirements.txt where I added my required packages and its version for my sample function (For minimum packages, please refer to [this](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python#python-version-and-package-management)) 90 | 91 | ```txt 92 | # Minimum packages for azure functions 93 | azure-functions 94 | azure-functions-worker 95 | grpcio==1.14.1 96 | grpcio-tools==1.14.1 97 | protobuf==3.6.1 98 | six==1.11.0 99 | 100 | # Additional packages 101 | numpy==1.15.4 102 | ``` 103 | 104 | Here is how you install packages listed in `requirements.txt` 105 | ```sh 106 | pip install -r requirements.txt 107 | ``` 108 | 109 | ### function.json 110 | Configure trigger, input and output binding with `function.json`. Please see [Azure Functions Python developer guide](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python) for the detail 111 | 112 | 113 | ## Update the host.json file to use extension bundles, 114 | 115 | In version 2.x of the Azure Functions runtime, you have to explicitly register the binding extensions that you use in your function app. To use extension bundles, update the `host.json` file to include the following entry for extensionBundle: 116 | > host.json 117 | ```json 118 | { 119 | "version": "2.0", 120 | "extensionBundle": { 121 | "id": "Microsoft.Azure.Functions.ExtensionBundle", 122 | "version": "[1.*, 2.0.0)" 123 | } 124 | } 125 | ``` 126 | > [NOTE] As an alternative way, you can manually install extension bundles by running a command - `func extensions install` so appropritate binding extensions are installed in `bin` directory. But if you already added the entry for extensionBundle in `host.json` like above, you don't need this. 
127 | > ```bash 128 | > # change directory to a project directory 129 | > cd functions 130 | > # Manually install extension bundles using func command (Azure Core Tools) 131 | > func extensions install 132 | > ``` 133 | 134 | ## Run the function locally 135 | 136 | From inside the project directory (e.g. `v2functions`), run: 137 | 138 | ```sh 139 | func host start 140 | ``` 141 | For more detail, please refer to [Local development Azure Functions Core Tools](https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-register#local-development-azure-functions-core-tools) 142 | > 143 | 144 | ## Publishing to Azure 145 | 146 | ```sh 147 | APP_NAME="your function name" 148 | func azure functionapp publish $APP_NAME 149 | ``` 150 | 151 | If you got ERROR like this, do publish with `--build-native-deps` option 152 | ``` 153 | There was an error restoring dependencies.ERROR: cannot install vsts-cd-manager-1.0.2 dependency: binary dependencies without wheels are not supported. Use the --build-native-deps option to try building the binary dependenciesusing a Docker container. 
154 | ``` 155 | Publishing with `--build-native-deps` option: 156 | ```sh 157 | func azure functionapp publish $APP_NAME --build-native-deps 158 | ``` 159 | 160 | If you're unable to import your modules, try publishing again using the `--no-bundler` option ( See also [this doc](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python) for more detail): 161 | ```sh 162 | func azure functionapp publish $APP_NAME --build-native-deps --no-bundler 163 | ``` 164 | 165 | 166 | ## LINKS 167 | - [Work with Azure Functions Core Tools V2](https://docs.microsoft.com/en-us/azure/azure-functions/functions-run-local#v2) 168 | - [Create your first Python function in Azure](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-python) 169 | - [Azure Functions Python developer guide](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python) 170 | -------------------------------------------------------------------------------- /handson/README.md: -------------------------------------------------------------------------------- 1 | # Hands-on: Serverless Application Development with Python (V1 Functions) 2 | 3 | | section | Description | link | 4 | | ------------- | ------------- | ------------- | 5 | | Module0 | HTTP Trigger function that writes "Helloworld" as HTTP response | [manual(ja)](module0/README_ja.md) | 6 | | Module1 | HTTP Trigger function that writes feed URL into queue (HTTPトリガーからのフィード情報のキュー書き込み) | [manual(ja)](module1/README_ja.md) | 7 | | Module2 | Queue Trigger function that crawls sites based on the feed URL and writes the results into Cosmos DB (Queueトリガーで取得したフィードURLを元にクローリングを行いその内容をCosmos DBに保存) | [manual(ja)](module2/README_ja.md) | 8 | | Special | How to change the Python version being used in a Function App (Pythonランタイムの変更方法) | [manual](../docs/custom-python-version.md) | 9 | --------------------------------------------------------------------------------
/handson/module0/README_ja.md: -------------------------------------------------------------------------------- 1 | # モジュール0 - Helloworld 2 | 3 | ## 1. Function Appの作成 (まだ作成していない場合のみ) 4 | 5 | * [Create a first Python Function in the Azure portal](https://github.com/yokawasa/azure-functions-python-samples/blob/master/docs/create-function-app-in-azure-portal.md)をベースにAzureポータルで作成 6 | 7 | ## 2. ソースコードをGithubよりダウンロード 8 | レポジトリ: https://github.com/yokawasa/azure-functions-python-samples 9 | 10 | ``` 11 | git clone https://github.com/yokawasa/azure-functions-python-samples.git 12 | ``` 13 | もしくはレポジトリからZIPで[ダウンロード](https://github.com/yokawasa/azure-functions-python-samples/archive/master.zip) 14 | 15 | モジュール0のマテリアル配置場所: azure-functions-python-samples/handson/module0配下 16 | 17 | ## 3. Functionのデプロイ 18 | 19 | Azureポータルまたはコマンドでデプロイ 20 | 21 | * Azureポータルの場合 22 | 手順については[こちら](../../docs/create-function-app-in-azure-portal.md)を参照ください 23 | 24 | * コマンドの場合 (ここではgit) 25 | 手順については[こちら](../../docs/local-git-deployment_ja.md)を参照ください 26 | 27 | ## 4. Functionのテスト 28 | 29 | [こちら](../../docs/create-function-app-in-azure-portal.md#test-the-function)と同様の方法で動作確認 30 | 31 | 余裕がある場合はソースコードのdump部分のコメントを外して、再デプロイして実行してみてください。Pythonランタイムのバージョンや環境変数が全て出力されます. 
32 | 33 | ``` 34 | import os 35 | import platform 36 | import sys 37 | import json 38 | 39 | postreqdata = json.loads(open(os.environ['req']).read()) 40 | response = open(os.environ['res'], 'w') 41 | response.write("hello world from "+postreqdata['name']) 42 | response.close() 43 | 44 | ### dump 45 | #print("Python Version = '{0}'".format(platform.python_version())) 46 | #print(sys.version_info) 47 | #for e in os.environ: 48 | # print ("{}->{}".format(e, os.environ[e])) 49 | ``` 50 | 51 | -------------------------------------------------------------------------------- /handson/module0/http-trigger-helloworld/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "type": "http", 5 | "direction": "out", 6 | "name": "res" 7 | }, 8 | { 9 | "type": "httpTrigger", 10 | "name": "req", 11 | "authLevel": "function", 12 | "methods": [ 13 | "get", 14 | "post", 15 | "delete", 16 | "head", 17 | "patch", 18 | "put", 19 | "options", 20 | "trace" 21 | ], 22 | "direction": "in" 23 | } 24 | ], 25 | "disabled": false 26 | } 27 | -------------------------------------------------------------------------------- /handson/module0/http-trigger-helloworld/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | 5 | Azure Functions HTTP Trigger Python Sample 6 | 7 | """ 8 | 9 | import os 10 | import platform 11 | import sys 12 | import json 13 | 14 | postreqdata = json.loads(open(os.environ['req']).read()) 15 | response = open(os.environ['res'], 'w') 16 | response.write("hello world from "+postreqdata['name']) 17 | response.close() 18 | 19 | ### dump 20 | #print("Python Version = '{0}'".format(platform.python_version())) 21 | #print(sys.version_info) 22 | #for e in os.environ: 23 | # print ("{}->{}".format(e, os.environ[e])) 24 | -------------------------------------------------------------------------------- /handson/module1/README_ja.md: 
-------------------------------------------------------------------------------- 1 | # モジュール1 - HTTPトリガーからのフィード情報のキュー書き込み 2 | 3 | HTTPトリガーでPOSTリクエストから受け取ったRSSフィードURL をキューメッセージとして書き込みます 4 | 5 | モジュール1とモジュール2はセットになります。 6 | 7 | ## 1. Function Appの作成 (まだ作成していない場合のみ) 8 | 9 | * [Create a first Python Function in the Azure portal](https://github.com/yokawasa/azure-functions-python-samples/blob/master/docs/create-function-app-in-azure-portal.md)をベースにAzureポータルで作成 10 | 11 | ## 2. ソースコードをGithubよりダウンロード(まだの場合のみ) 12 | レポジトリ: https://github.com/yokawasa/azure-functions-python-samples 13 | 14 | ``` 15 | git clone https://github.com/yokawasa/azure-functions-python-samples.git 16 | ``` 17 | もしくはレポジトリからZIPで[ダウンロード](https://github.com/yokawasa/azure-functions-python-samples/archive/master.zip) 18 | 19 | モジュール1のマテリアル配置場所: azure-functions-python-samples/handson/module1配下 20 | 21 | ## 3. ストレージアカウント 22 | 23 | RSSフィードURL書き込み用にキューストレージが必要。基本的に1のFunctions App作成時に作るストテージアカウントを使用する。もしFunction App用と分けたい場合、別途作成いただいても問題ありません。ストレージの種類は汎用ストレージを選択ください。 24 | 25 | * [ストレージアカウントの作成方法](https://docs.microsoft.com/ja-jp/azure/storage/common/storage-create-storage-account#create-a-storage-account) 26 | 27 | ## 4. Functionのデプロイ 28 | 29 | Azureポータルまたはコマンドでデプロイ 30 | 31 | * Azureポータルの場合 32 | 手順については[こちら](../../docs/create-function-app-in-azure-portal.md)を参照ください 33 | 34 | * コマンドの場合 (ここではgit) 35 | 手順については[こちら](../../docs/local-git-deployment_ja.md)を参照ください 36 | 37 | ## 5. 
Outバインディングのストレージ文字列の設定 38 | 39 | 下記function.jsonのOutバインディングのストレージ文字列("STORAGE_CONNECTION"にあたる)の設定を行う。 40 | 41 | ``` 42 | { 43 | "bindings": [ 44 | { 45 | "authLevel": "function", 46 | "type": "httpTrigger", 47 | "direction": "in", 48 | "name": "req", 49 | "methods": [ 50 | "post" 51 | ] 52 | }, 53 | { 54 | "type": "http", 55 | "direction": "out", 56 | "name": "res" 57 | }, 58 | { 59 | "type": "queue", 60 | "name": "outputQueueItem", 61 | "queueName": "rssfeedqueue", 62 | "connection": "STORAGE_CONNECTION", 63 | "direction": "out" 64 | } 65 | ], 66 | "disabled": false 67 | } 68 | ``` 69 | 70 | 設定は2とおり 71 | 72 | 1. App SettingsにSTORAGE_CONNECTIONの変数とその値を追加 73 | ![](../../img/handson-mod1-appsetting.png) 74 | 75 | 2. ポータルのFunction UIのIntegrationページでストレージ文字列の設定をする 76 | ![](../../img/handson-mod1-integration.png) 77 | 78 | ## 6. Functionのテスト 79 | 80 | HTTP POSTリクエストを送信してRSSフィードURLが最終的に指定のキューに入ることを確認する. RSSフィード送信用パラメータfeedurlにRSSフィードURLを指定する 81 | 82 | POSTリクエスト送信例: 83 | ``` 84 | curl -s -H "Content-Type: application/json; charset=UTF-8" -XPOST "https://yoichikademo27.azurewebsites.net/api/http-trigger-feed-to-queue?code=dTtNrLDYaaOrF3Gl6lkZfPRMB7Z9I47wYyJhCUWbvnHrzgOUJTp2dw==" -d'{ "feedurl": "https://azure.microsoft.com/en-us/blog/feed/" }' 85 | ``` 86 | 87 | キューの確認方法 (キュー名:rssfeedqueue ) 88 | Azureポータルもしくは[ストレージエクスプローラー](https://azure.microsoft.com/ja-jp/features/storage-explorer/)でキュー名rssfeedqueueの中を確認ください 89 | -------------------------------------------------------------------------------- /handson/module1/http-trigger-feed-to-queue/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "authLevel": "function", 5 | "type": "httpTrigger", 6 | "direction": "in", 7 | "name": "req", 8 | "methods": [ 9 | "post" 10 | ] 11 | }, 12 | { 13 | "type": "http", 14 | "direction": "out", 15 | "name": "res" 16 | }, 17 | { 18 | "type": "queue", 19 | "name": "outputQueueItem", 20 | "queueName": "rssfeedqueue", 
21 | "connection": "STORAGE_CONNECTION", 22 | "direction": "out" 23 | } 24 | ], 25 | "disabled": false 26 | } 27 | -------------------------------------------------------------------------------- /handson/module1/http-trigger-feed-to-queue/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | 5 | Azure Functions HTTP Trigger Python Sample 6 | - Get RSS feed URL from HTTP Post and add it onto Queue 7 | 8 | """ 9 | 10 | import os 11 | import json 12 | 13 | ## Open HTTP Handle + Get Post Data 14 | postreqdata = json.loads(open(os.environ['req']).read()) 15 | response = open(os.environ['res'], 'w') 16 | 17 | ## Out Queue Open and Write feedurl + Close 18 | if 'feedurl' not in postreqdata: 19 | print ("No feedurl included in request body!") 20 | response.write("NG: No feedurl included in request body!") 21 | else: 22 | outqueue = open(os.environ['outputQueueItem'], 'w') 23 | outqueue.write(postreqdata['feedurl']) 24 | outqueue.close() 25 | response.write("OK") 26 | 27 | ## Close HTTP Handle 28 | response.close() 29 | -------------------------------------------------------------------------------- /handson/module1/http-trigger-feed-to-queue/send-https-requests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | #api_url="AZURE_FUNCTION_URL ex https://.azurewebsites.net/api/?code=xxxxx" 4 | api_url="https://yoichikademo27.azurewebsites.net/api/http-trigger-feed-to-queue?code=dTtNrLDYaaOrF3Gl6lkZfPRMB7Z9I47wYyJhCUWbvnHrzgOUJTp2dw==" 5 | 6 | echo "Sending HTTP POST Request............." 
7 | curl -s\ 8 | -H "Content-Type: application/json; charset=UTF-8"\ 9 | -XPOST ${api_url} -d'{ 10 | "feedurl": "https://azure.microsoft.com/en-us/blog/feed/" 11 | }' 12 | 13 | echo "" 14 | -------------------------------------------------------------------------------- /handson/module2/README_ja.md: -------------------------------------------------------------------------------- 1 | # モジュール2 - Queueトリガーで取得したフィードURLを元にクローリングを行いその内容をCosmosDB (Document DB)に保存  2 | 3 | Queueトリガーで取得したフィードURLを元にクローリングを行いその内容をCosmosDB (Document DB)に保存します. 4 | 5 | モジュール1とモジュール2はセットになります。 6 | 7 | ## 1. Function Appの作成 (まだ作成していない場合のみ) 8 | 9 | * [Create a first Python Function in the Azure portal](https://github.com/yokawasa/azure-functions-python-samples/blob/master/docs/create-function-app-in-azure-portal.md)をベースにAzureポータルで作成 10 | 11 | ## 2. ソースコードをGithubよりダウンロード(まだの場合のみ) 12 | レポジトリ: https://github.com/yokawasa/azure-functions-python-samples 13 | 14 | ``` 15 | git clone https://github.com/yokawasa/azure-functions-python-samples.git 16 | ``` 17 | もしくはレポジトリからZIPで[ダウンロード](https://github.com/yokawasa/azure-functions-python-samples/archive/master.zip) 18 | 19 | モジュール1のマテリアル配置場所: azure-functions-python-samples/handson/module1配下 20 | 21 | ## 3. CosmosDBアカウントの作成 22 | 23 | * [CosmosDBアカウントの作成方法](https://docs.microsoft.com/ja-jp/azure/cosmos-db/tutorial-develop-documentdb-dotnet#create-an-azure-cosmos-db-account) 24 | 25 | ## 4. Functionのデプロイ 26 | 27 | Azureポータルまたはコマンドでデプロイ 28 | 29 | * Azureポータルの場合 30 | 手順については[こちら](../../docs/create-function-app-in-azure-portal.md)を参照ください 31 | 32 | * コマンドの場合 (ここではgit) 33 | 手順については[こちら](../../docs/local-git-deployment_ja.md)を参照ください 34 | 35 | ## 5. Pythonライブラリの追加 (feedparser) 36 | 37 | Kudu UI >> CMDプロンプトで次の手順によりpipを使ってライブラリをインストールする. 
特権権限が必要であるためKudu UIからの実行が必要 38 | Virtualenvを利用して今回のFunction用ディレクトリ(プロジェクト)ごとにインストールしていることが特徴 39 | 40 | ``` 41 | D:\home> 42 | D:\home> cd site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb 43 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb> python -m pip list 44 | pip (1.5.6) 45 | setuptools (6.0.2) 46 | virtualenv (1.11.6) 47 | 48 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb> python -m virtualenv myenv 49 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb> cd myenv\Scripts 50 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts> activate.bat 51 | (myenv) D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts> 52 | (myenv) D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts>python -m pip install -U pip 53 | Downloading/unpacking pip from https://pypi.python.org/packages/b6/ac/7015eb97dc749283ffdec1c3a88ddb8ae03b8fad0f0e611408f196358da3/pip-9.0.1-py2.py3-none-any.whl#md5=297dbd16ef53bcef0447d245815f5144 54 | Installing collected packages: pip 55 | Found existing installation: pip 1.5.6 56 | Uninstalling pip: 57 | Successfully uninstalled pip 58 | Successfully installed pip 59 | Cleaning up... 60 | (myenv) D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts>python -m pip install feedparser 61 | ``` 62 | 63 | ## 6. 
キューTrigger用のストレージ文字列とOutバインディングのCosmosDB文字列の設定 64 | 65 | 下記function.jsonのストレージ文字列("STORAGE_CONNECTION"にあたる)とDOCUMENTDB_CONNECTION設定を行う  66 | 67 | ``` 68 | { 69 | "bindings": [ 70 | { 71 | "type": "documentDB", 72 | "name": "outputDocument", 73 | "databaseName": "mydb", 74 | "collectionName": "rssfeeds", 75 | "createIfNotExists": true, 76 | "connection": "DOCUMENTDB_CONNECTION", 77 | "direction": "out" 78 | }, 79 | { 80 | "type": "queueTrigger", 81 | "name": "inputMessage", 82 | "queueName": "rssfeedqueue", 83 | "connection": "STORAGE_CONNECTION", 84 | "direction": "in" 85 | } 86 | ], 87 | "disabled": false 88 | } 89 | ``` 90 | 91 | 設定は2とおり 92 | 93 | 1. App SettingsにSTORAGE_CONNECTION, DOCUMENTDB_CONNECTIONの変数とその値を追加 94 | ![](../../img/handson-mod2-appsetting.png) 95 | 96 | 2. ポータルのFunction UIのIntegrationページでストレージ文字列の設定をする 97 | DOCUMENTDB_CONNECTIONの場合 98 | ![](../../img/handson-mod2-integration-doc.png) 99 | STORAGE_CONNECTIONの場合 100 | ![](../../img/handson-mod2-integration-queue.png) 101 | 102 | ## 7. 
Functionのテスト 103 | 104 | module1の続きとなる。module1でrssfeedqueueにRSSフィードが格納されると、本module2のキュートリガーによりFunctionがキックされる。RSSフィードURLを元にfeedparserライブラリを使ってクローリングが行われ、その内容がCosmosDB(DocumentDB)に格納される。最終的にCosmosDBにデータが格納されることを確認する 105 | 106 | AzureポータルのCosmosDB Data Explorerから該当のコレクションに対してクローリングされた内容が格納されていることを確認する 107 | ![](../../img/handson-mod2-test-cosmosdb.png) 108 | 109 | [ストレージエクスプローラー](https://azure.microsoft.com/ja-jp/features/storage-explorer/)からもCosmosDBの内容が確認可能 110 | -------------------------------------------------------------------------------- /handson/module2/queue-trigger-rss-crawl-out-cosmosdb/README.md: -------------------------------------------------------------------------------- 1 | # queue-trigger-rssfeed-crawler 2 | Azure Functions Queue Trigger Python Sample that gets an RSS feed URL from a Queue and dumps all items obtained from the RSS feed 3 | 4 | 5 | ## Add RSS feeds to Queue to trigger the function 6 | ``` 7 | https://azure.microsoft.com/en-us/blog/feed/ 8 | https://azure.microsoft.com/en-us/updates/feed/ 9 | ... 
10 | ``` 11 | 12 | 13 | 14 | ## How to Install module 15 | 16 | 17 | ``` 18 | D:\home> 19 | D:\home> cd site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb 20 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb> python -m pip list 21 | pip (1.5.6) 22 | setuptools (6.0.2) 23 | virtualenv (1.11.6) 24 | 25 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb> python -m virtualenv myenv 26 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb> cd myenv\Scripts 27 | D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts> activate.bat 28 | (myenv) D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts> 29 | (myenv) D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts>python -m pip install -U pip 30 | Downloading/unpacking pip from https://pypi.python.org/packages/b6/ac/7015eb97dc749283ffdec1c3a88ddb8ae03b8fad0f0e611408f196358da3/pip-9.0.1-py2.py3-none-any.whl#md5=297dbd16ef53bcef0447d245815f5144 31 | Installing collected packages: pip 32 | Found existing installation: pip 1.5.6 33 | Uninstalling pip: 34 | Successfully uninstalled pip 35 | Successfully installed pip 36 | Cleaning up... 
37 | (myenv) D:\home\site\wwwroot\queue-trigger-rss-crawl-out-cosmosdb\myenv\Scripts>python -m pip install 38 | 39 | ``` 40 | 41 | -------------------------------------------------------------------------------- /handson/module2/queue-trigger-rss-crawl-out-cosmosdb/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "type": "documentDB", 5 | "name": "outputDocument", 6 | "databaseName": "mydb", 7 | "collectionName": "rssfeeds", 8 | "createIfNotExists": true, 9 | "connection": "DOCUMENTDB_CONNECTION", 10 | "direction": "out" 11 | }, 12 | { 13 | "type": "queueTrigger", 14 | "name": "inputMessage", 15 | "queueName": "rssfeedqueue", 16 | "connection": "STORAGE_CONNECTION", 17 | "direction": "in" 18 | } 19 | ], 20 | "disabled": false 21 | } 22 | -------------------------------------------------------------------------------- /handson/module2/queue-trigger-rss-crawl-out-cosmosdb/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | 5 | Azure Functions Queue Trigger Python Sample 6 | - Get RSS feed URL from Queue and store all items that obtained from RSS feed 7 | 8 | """ 9 | 10 | import os 11 | import sys 12 | import json 13 | import hashlib 14 | #sys.path.append("site-packages") 15 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname( __file__ ), 'myenv/Lib/site-packages'))) 16 | import feedparser 17 | 18 | # Read the queue message 19 | rss_feed_url = open(os.environ['inputMessage']).read() 20 | #rss_feed_url = "https://azure.microsoft.com/en-us/blog/feed/" 21 | print "Python script processes rss feed: '{0}'".format(rss_feed_url) 22 | 23 | # Get RSS feed by using feedparser module 24 | feed=feedparser.parse(rss_feed_url) 25 | 26 | # Collect all RSS feed obtained and store them into Document DB 27 | outdocs=[] 28 | for entry in feed[ 'entries' ]: 29 | idhash = hashlib.sha1( entry[ 'link' ]).hexdigest() 30 
| outdoc= { 31 | "id": idhash, 32 | "title": entry[ 'title' ].encode('utf-8'), 33 | "description": entry[ 'description' ].encode('utf-8'), 34 | "date": entry[ 'updated' ] 35 | } 36 | print(outdoc) 37 | outdocs.append(outdoc) 38 | 39 | # Writing to DocumentDB (Document parameter name: outputDocument) 40 | with open(os.environ['outputDocument'], 'wb') as f: 41 | json.dump(outdocs,f) 42 | -------------------------------------------------------------------------------- /img/azure-function-x-python.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/azure-function-x-python.png -------------------------------------------------------------------------------- /img/cognitive-computer-vision-sample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/cognitive-computer-vision-sample.png -------------------------------------------------------------------------------- /img/custom-python-version-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/custom-python-version-1.png -------------------------------------------------------------------------------- /img/custom-python-version-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/custom-python-version-2.png -------------------------------------------------------------------------------- /img/custom-python-version-3.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/custom-python-version-3.png -------------------------------------------------------------------------------- /img/custom-python-version-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/custom-python-version-4.png -------------------------------------------------------------------------------- /img/custom-python-version-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/custom-python-version-5.png -------------------------------------------------------------------------------- /img/handson-mod1-appsetting.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/handson-mod1-appsetting.png -------------------------------------------------------------------------------- /img/handson-mod1-integration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/handson-mod1-integration.png -------------------------------------------------------------------------------- /img/handson-mod2-appsetting.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/handson-mod2-appsetting.png -------------------------------------------------------------------------------- /img/handson-mod2-integration-doc.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/handson-mod2-integration-doc.png -------------------------------------------------------------------------------- /img/handson-mod2-integration-queue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/handson-mod2-integration-queue.png -------------------------------------------------------------------------------- /img/handson-mod2-test-cosmosdb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/img/handson-mod2-test-cosmosdb.png -------------------------------------------------------------------------------- /scripts/create-azfunc-v2-linux-app-service-plan.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | # 4 | # Create the Azure Functions on Linux ( App Service Plan ) 5 | # 6 | 7 | RESOURCE_GROUP="" 8 | REGION="" 9 | STORAGE_ACCOUNT="" 10 | PLAN_NAME="" 11 | APP_NAME="" # the name needs to be unique across all apps in Azure. 
12 | 13 | echo "Create App Service Plan" 14 | az appservice plan create --name $PLAN_NAME \ 15 | --resource-group $RESOURCE_GROUP \ 16 | --sku B1 --is-linux 17 | 18 | echo "Create a empty function app on Linux (App Service Plan): $APP_NAME" 19 | az functionapp create \ 20 | --resource-group $RESOURCE_GROUP \ 21 | --name $APP_NAME \ 22 | --storage-account $STORAGE_ACCOUNT \ 23 | --plan $PLAN_NAME \ 24 | --runtime python 25 | 26 | #echo "Clearning up all resources" 27 | # az group delete --name $RESOURCE_GROUP 28 | 29 | echo "Done" 30 | -------------------------------------------------------------------------------- /scripts/create-azfunc-v2-linux-consumption-plan.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | # 4 | # Create the Azure Functions on Linux ( Consumption Plan ) 5 | # [NOTE] Linux Consumption Preview is in preview 6 | 7 | RESOURCE_GROUP="" 8 | REGION="" 9 | STORAGE_ACCOUNT="" 10 | APP_NAME="" # the name needs to be unique across all apps in Azure. 11 | # [NOTE] 12 | # Linux Consumption plan is only available in limited regions 13 | # see https://github.com/Azure/Azure-Functions/wiki/Azure-Functions-on-Linux-Preview#prerequisites 14 | 15 | RESOURCE_GROUP="RG-azfuncv2-t" 16 | REGION="westus" 17 | STORAGE_ACCOUNT="azfuncv2linuxstore2" 18 | APP_NAME="yoichikaazfuncv2linux002" # the name needs to be unique across all apps in Azure. 
19 | 20 | echo "Create an empty function app on Linux (Consumption Plan): $APP_NAME" 21 | az functionapp create \ 22 | --resource-group $RESOURCE_GROUP \ 23 | --name $APP_NAME \ 24 | --storage-account $STORAGE_ACCOUNT \ 25 | --os-type Linux \ 26 | --consumption-plan-location $REGION \ 27 | --runtime python 28 | 29 | #echo "Cleaning up all resources" 30 | # az group delete --name $RESOURCE_GROUP 31 | 32 | echo "Done" 33 | -------------------------------------------------------------------------------- /scripts/create-cognitive-computer-vision.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Create Cognitive Computer Vision Resource 4 | # 5 | 6 | COGNITIVE_RESOURCE_GROUP="" 7 | REGION="" 8 | COGNITIVE_ACCOUNT_NAME="" 9 | 10 | ## KIND 11 | # You can get the list of kinds with the following command: 12 | # az cognitiveservices account list-kinds 13 | # [ 14 | # "AnomalyDetector", 15 | # "Bing.Autosuggest.v7", 16 | # "Bing.CustomSearch", 17 | # "Bing.EntitySearch", 18 | # "Bing.Search.v7", 19 | # "Bing.SpellCheck.v7", 20 | # "CognitiveServices", 21 | # "ComputerVision", 22 | # "ContentModerator", 23 | # "CustomVision.Prediction", 24 | # "CustomVision.Training", 25 | # "Dummy", 26 | # "Face", 27 | # "InkRecognizer", 28 | # "Internal.AllInOne", 29 | # "LUIS", 30 | # "Personalizer", 31 | # "QnAMaker", 32 | # "SpeakerRecognition", 33 | # "SpeechServices", 34 | # "TextAnalytics", 35 | # "TextTranslation" 36 | # ] 37 | 38 | echo "Create Resource Group: $COGNITIVE_RESOURCE_GROUP" 39 | az group create --name $COGNITIVE_RESOURCE_GROUP --location $REGION 40 | 41 | echo "Create Cognitive Resource for Computer Vision: $COGNITIVE_ACCOUNT_NAME" 42 | az cognitiveservices account create \ 43 | -n $COGNITIVE_ACCOUNT_NAME \ 44 | -g $COGNITIVE_RESOURCE_GROUP \ 45 | --kind ComputerVision \ 46 | --sku S1 \ 47 | -l $REGION \ 48 | --yes 49 | 50 | ## NOTE 51 | ## `--yes`: Do not prompt for terms confirmation. 
52 | 53 | API_ENDPOINT=$(az cognitiveservices account show -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 54 | API_KEY=$(az cognitiveservices account keys list -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 55 | 56 | echo "API Endpoint: ${API_ENDPOINT}" 57 | echo "API KEY: ${API_KEY}" 58 | 59 | echo "Done" 60 | -------------------------------------------------------------------------------- /scripts/create-cosmosdb-test-db-coll.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e -x 3 | 4 | COSMOSDB_ACCOUNT_NAME="" 5 | RESOURCE_GROUP="" 6 | DATABASE_NAME="" 7 | CREATE_LEASE_COLLECTION=1 # yes,no=(1,0) 8 | LEASES_COLLECTION_NAME="" 9 | 10 | az cosmosdb create \ 11 | --name $COSMOSDB_ACCOUNT_NAME \ 12 | --kind GlobalDocumentDB \ 13 | --resource-group $RESOURCE_GROUP 14 | # Get Key 15 | COSMOSDB_KEY=$(az cosmosdb list-keys --name $COSMOSDB_ACCOUNT_NAME --resource-group $RESOURCE_GROUP --output tsv |awk '{print $1}') 16 | echo "Cosmos DB Key: $COSMOSDB_KEY" 17 | 18 | # Create Database 19 | az cosmosdb database create \ 20 | --name $COSMOSDB_ACCOUNT_NAME \ 21 | --db-name $DATABASE_NAME \ 22 | --key $COSMOSDB_KEY \ 23 | --resource-group $RESOURCE_GROUP 24 | 25 | # Create a container with a partition key and provision 400 RU/s throughput. 
26 | COLLECTION_NAME="testcol01" 27 | az cosmosdb collection create \ 28 | --resource-group $RESOURCE_GROUP \ 29 | --collection-name $COLLECTION_NAME \ 30 | --name $COSMOSDB_ACCOUNT_NAME \ 31 | --db-name $DATABASE_NAME \ 32 | --partition-key-path /name \ 33 | --throughput 400 34 | 35 | COLLECTION_NAME="testcol02" 36 | az cosmosdb collection create \ 37 | --resource-group $RESOURCE_GROUP \ 38 | --collection-name $COLLECTION_NAME \ 39 | --name $COSMOSDB_ACCOUNT_NAME \ 40 | --db-name $DATABASE_NAME \ 41 | --partition-key-path /name \ 42 | --throughput 400 43 | 44 | # Create a container for leases 45 | # The 'leases' collection needs to be a single-partition collection 46 | # Please see also: https://github.com/Azure/azure-functions-core-tools/issues/930 47 | if [ $CREATE_LEASE_COLLECTION -gt 0 ] 48 | then 49 | az cosmosdb collection create \ 50 | --resource-group $RESOURCE_GROUP \ 51 | --collection-name $LEASES_COLLECTION_NAME \ 52 | --name $COSMOSDB_ACCOUNT_NAME \ 53 | --db-name $DATABASE_NAME \ 54 | --throughput 400 55 | fi 56 | -------------------------------------------------------------------------------- /scripts/create-resource-group.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | # Create Azure Resource group 4 | 5 | RESOURCE_GROUP="" 6 | REGION="" 7 | 8 | RESOURCE_GROUP="RG-azfuncv2-ta" 9 | REGION="japanwest" 10 | 11 | echo "Create Resource Group: $RESOURCE_GROUP" 12 | az group create --name $RESOURCE_GROUP --location $REGION 13 | 14 | echo "Done" 15 | -------------------------------------------------------------------------------- /scripts/create-storage-account.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | # Create Azure Storage Account 4 | 5 | RESOURCE_GROUP="" 6 | REGION="" 7 | STORAGE_ACCOUNT="" 8 | 9 | RESOURCE_GROUP="RG-azfuncv2-ta" 10 | REGION="japanwest" 11 | STORAGE_ACCOUNT="azfuncv2linuxstore3" 12 | 13 | echo "Create an Azure 
Storage account: $STORAGE_ACCOUNT" 14 | az storage account create --name $STORAGE_ACCOUNT \ 15 | --location $REGION \ 16 | --resource-group $RESOURCE_GROUP \ 17 | --sku Standard_LRS 18 | 19 | echo "Done" 20 | -------------------------------------------------------------------------------- /scripts/docker-build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | 4 | cwd=`dirname "$0"` 5 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 6 | 7 | DOCKER_ID="" 8 | CONTAINER_IMAGE_NAME="" 9 | 10 | FUNC_PROJECT_DIR="$cwd/../v2functions" 11 | 12 | cd $FUNC_PROJECT_DIR 13 | TAG=`cat VERSION` 14 | docker build --tag $DOCKER_ID/$CONTAINER_IMAGE_NAME:$TAG . 15 | -------------------------------------------------------------------------------- /scripts/docker-run-mini.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | 4 | cwd=`dirname "$0"` 5 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 6 | 7 | ############################################################# 8 | # params 9 | ############################################################# 10 | DOCKER_ID="" 11 | CONTAINER_IMAGE_NAME="" 12 | RESOURCE_GROUP="" 13 | STORAGE_ACCOUNT_NAME="" 14 | ############################################################# 15 | 16 | FUNC_PROJECT_DIR="$cwd/../v2functions" 17 | 18 | TAG=`cat $FUNC_PROJECT_DIR/VERSION` 19 | 20 | STORAGE_CONNECTION_STRING=$(az storage account show-connection-string \ 21 | --resource-group $RESOURCE_GROUP --name $STORAGE_ACCOUNT_NAME \ 22 | --query connectionString --output tsv) 23 | 24 | docker run -p 8080:80 -it \ 25 | -e AzureWebJobsStorage="$STORAGE_CONNECTION_STRING" \ 26 | -e AzureFunctionsJobHost__Logging__Console__IsEnabled="true" \ 27 | $DOCKER_ID/$CONTAINER_IMAGE_NAME:$TAG 28 | -------------------------------------------------------------------------------- /scripts/docker-run.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | 4 | cwd=`dirname "$0"` 5 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 6 | 7 | ############################################################# 8 | # params 9 | ############################################################# 10 | DOCKER_ID="" 11 | CONTAINER_IMAGE_NAME="" 12 | RESOURCE_GROUP="" 13 | REGION="" 14 | STORAGE_ACCOUNT_NAME="" 15 | COSMOSDB_ACCOUNT_NAME="" 16 | COGNITIVE_ACCOUNT_NAME="" 17 | COGNITIVE_RESOURCE_GROUP="" 18 | ############################################################# 19 | 20 | FUNC_PROJECT_DIR="$cwd/../v2functions" 21 | 22 | TAG=`cat $FUNC_PROJECT_DIR/VERSION` 23 | 24 | STORAGE_CONNECTION_STRING=$(az storage account show-connection-string \ 25 | --resource-group $RESOURCE_GROUP --name $STORAGE_ACCOUNT_NAME \ 26 | --query connectionString --output tsv) 27 | COSMOSDB_CONNECTION_STRING=$(az cosmosdb list-connection-strings \ 28 | --resource-group $RESOURCE_GROUP --name $COSMOSDB_ACCOUNT_NAME \ 29 | --query connectionStrings --output tsv | head -1 | awk '{print $1}') 30 | 31 | COMPUTER_VSION_API_ENDPOINT=$(az cognitiveservices account show \ 32 | -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 33 | 34 | COMPUTER_VSION_API_SUBSCRIPTION=$(az cognitiveservices account keys list \ 35 | -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 36 | 37 | 38 | docker run -p 8080:80 -it \ 39 | -e AzureWebJobsStorage="$STORAGE_CONNECTION_STRING" \ 40 | -e MyStorageConnectionString="$STORAGE_CONNECTION_STRING" \ 41 | -e MyCosmosDBConnectionString="$COSMOSDB_CONNECTION_STRING" \ 42 | -e ComputerVisionSubscription="$COMPUTER_VSION_API_SUBSCRIPTION" \ 43 | -e ComputerVisionApiEndpoint="$COMPUTER_VSION_API_ENDPOINT" \ 44 | -e AzureFunctionsJobHost__Logging__Console__IsEnabled="true" \ 45 | $DOCKER_ID/$CONTAINER_IMAGE_NAME:$TAG 46 | 
-------------------------------------------------------------------------------- /scripts/get-blob-sas-token.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | api_url="AZURE_FUNCTION_ENDPOINT: ex. https://.azurewebsites.net/api/" 4 | api_key="AZURE_FUNCTION_KEY: ex. aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 5 | 6 | echo "Sending HTTP POST Request............." 7 | curl -s\ 8 | -H "Content-Type: application/json; charset=UTF-8"\ 9 | -H "x-functions-key: ${api_key}"\ 10 | -XPOST ${api_url} -d'{ 11 | "permission": "rl", 12 | "container": "functiontest", 13 | "blobname": "sample.jpg", 14 | "ttl": 1 15 | }' 16 | echo "" 17 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/blob-container-create: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . $cwd/env.conf 6 | 7 | if [ $# -ne 1 ] 8 | then 9 | echo "$0 [container]" 10 | exit 11 | fi 12 | 13 | CONTAINER=$1 14 | echo "storage container: $CONTAINER" 15 | 16 | az storage container create --name $CONTAINER --connection-string $STORAGE_CONNECTION_STRING 17 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/blob-delete: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . 
$cwd/env.conf 6 | 7 | if [ $# -ne 2 ] 8 | then 9 | echo "$0 [container] [blobname]" 10 | exit 11 | fi 12 | 13 | CONTAINER=$1 14 | BLOB_NAME=$2 15 | echo "storage container: $CONTAINER" 16 | echo "blob name: $BLOB_NAME" 17 | 18 | az storage blob delete -c $CONTAINER -n $BLOB_NAME --connection-string $STORAGE_CONNECTION_STRING 19 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/blob-list: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . $cwd/env.conf 6 | 7 | if [ $# -ne 1 ] 8 | then 9 | echo "$0 [container]" 10 | exit 11 | fi 12 | 13 | CONTAINER=$1 14 | echo "storage container: $CONTAINER" 15 | 16 | # List blobs in the container 17 | az storage blob list --container-name $CONTAINER --output table --connection-string $STORAGE_CONNECTION_STRING 18 | 19 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/blob-upload: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . 
$cwd/env.conf 6 | 7 | if [ $# -ne 2 ] 8 | then 9 | echo "$0 [container] [file]" 10 | exit 11 | fi 12 | 13 | CONTAINER=$1 14 | LOCAL_FILE=$2 15 | FILE_NAME=$(basename $LOCAL_FILE) 16 | echo "storage container: $CONTAINER" 17 | echo "blob name: $FILE_NAME" 18 | 19 | az storage blob upload --container-name $CONTAINER --name $FILE_NAME --file $LOCAL_FILE --connection-string $STORAGE_CONNECTION_STRING 20 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/env.conf: -------------------------------------------------------------------------------- 1 | #================================================== 2 | # Environment Properties 3 | #================================================== 4 | 5 | AZURITE_DIR="/tmp/azurite" 6 | 7 | STORAGE_CONNECTION_STRING="DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;QueueEndpoint=http://127.0.0.1:10001/devstoreaccount1;TableEndpoint=http://127.0.0.1:10002/devstoreaccount1"; 8 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/queue-create: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . 
$cwd/env.conf 6 | 7 | if [ $# -ne 1 ] 8 | then 9 | echo "$0 [queuename]" 10 | exit 11 | fi 12 | 13 | QUEUE_NAME=$1 14 | echo "queue name: $QUEUE_NAME" 15 | 16 | az storage queue create --name $QUEUE_NAME --connection-string $STORAGE_CONNECTION_STRING 17 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/queue-delete: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . $cwd/env.conf 6 | 7 | if [ $# -ne 1 ] 8 | then 9 | echo "$0 [queuename]" 10 | exit 11 | fi 12 | 13 | QUEUE_NAME=$1 14 | echo "queue name: $QUEUE_NAME" 15 | 16 | az storage queue delete --name $QUEUE_NAME --connection-string $STORAGE_CONNECTION_STRING 17 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/queue-list: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . $cwd/env.conf 6 | 7 | az storage queue list --connection-string $STORAGE_CONNECTION_STRING 8 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/queue-message-get: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . 
$cwd/env.conf 6 | 7 | if [ $# -ne 1 ] 8 | then 9 | echo "$0 [queuename]" 10 | exit 11 | fi 12 | 13 | QUEUE_NAME=$1 14 | echo "queue name: $QUEUE_NAME" 15 | 16 | az storage message get --queue-name $QUEUE_NAME --connection-string $STORAGE_CONNECTION_STRING 17 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/queue-message-put: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . $cwd/env.conf 6 | 7 | if [ $# -ne 2 ] 8 | then 9 | echo "$0 [queuename] [content]" 10 | exit 11 | fi 12 | 13 | QUEUE_NAME=$1 14 | CONTENT=$2 15 | echo "queue name: $QUEUE_NAME" 16 | echo "queue content: $CONTENT" 17 | 18 | az storage message put --queue-name $QUEUE_NAME --content $CONTENT --connection-string $STORAGE_CONNECTION_STRING 19 | -------------------------------------------------------------------------------- /scripts/local-dev-helpers/start-azurite: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cwd=`dirname "$0"` 4 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 5 | . $cwd/env.conf 6 | 7 | if [ ! -d ${AZURITE_DIR} ] 8 | then 9 | mkdir -p ${AZURITE_DIR} 10 | fi 11 | 12 | azurite -l ${AZURITE_DIR} 13 | -------------------------------------------------------------------------------- /scripts/send-test-blob-sas-token.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # api_url="AZURE_FUNCTION_ENDPOINT: ex. https://.azurewebsites.net/api/" 4 | # api_key="AZURE_FUNCTION_KEY: ex. 
aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 5 | 6 | # func locally 7 | api_url="http://localhost:7071/api/http-trigger-blob-sas-token" 8 | # Docker 9 | # api_url="http://localhost:8080/api/http-trigger-blob-sas-token" 10 | api_key="" 11 | 12 | echo "Sending HTTP POST Request............." 13 | curl -s\ 14 | -H "Content-Type: application/json; charset=UTF-8"\ 15 | -H "x-functions-key: ${api_key}"\ 16 | -XPOST ${api_url} -d'{ 17 | "permission": "rl", 18 | "container": "functiontest", 19 | "blobname": "sample.jpg", 20 | "ttl": 2 21 | }' 22 | echo "" 23 | -------------------------------------------------------------------------------- /scripts/setup-az-for-azfunc-linux-preview.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e -x 4 | 5 | echo "Installing the Azure CLI extension for the Azure Functions Linux Consumption preview..." 6 | 7 | curl "https://functionscdn.azureedge.net/public/docs/functionapp-0.0.1-py2.py3-none-any.whl" -o functionapp-0.0.1-py2.py3-none-any.whl 8 | az extension add --source functionapp-0.0.1-py2.py3-none-any.whl 9 | -------------------------------------------------------------------------------- /scripts/update-azfunc-app-settings.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e -x 3 | 4 | RESOURCE_GROUP="" 5 | APP_NAME="" 6 | STORAGE_ACCOUNT_NAME="" 7 | COSMOSDB_ACCOUNT_NAME="" 8 | COGNITIVE_ACCOUNT_NAME="" 9 | COGNITIVE_RESOURCE_GROUP="" 10 | 11 | 12 | STORAGE_CONNECTION_STRING=$(az storage account show-connection-string \ 13 | --resource-group $RESOURCE_GROUP --name $STORAGE_ACCOUNT_NAME \ 14 | --query connectionString --output tsv) 15 | 16 | COSMOSDB_CONNECTION_STRING=$(az cosmosdb list-connection-strings \ 17 | --resource-group $RESOURCE_GROUP --name $COSMOSDB_ACCOUNT_NAME \ 18 | --query connectionStrings --output tsv | head -1 | awk '{print $1}') 19 | 20 | COMPUTER_VSION_API_ENDPOINT=$(az 
cognitiveservices account show \ 21 | -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 22 | 23 | COMPUTER_VSION_API_SUBSCRIPTION=$(az cognitiveservices account keys list \ 24 | -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 25 | 26 | az webapp config appsettings set \ 27 | -n $APP_NAME \ 28 | -g $RESOURCE_GROUP \ 29 | --settings \ 30 | ComputerVisionSubscription=$COMPUTER_VSION_API_SUBSCRIPTION \ 31 | ComputerVisionApiEndpoint=$COMPUTER_VSION_API_ENDPOINT \ 32 | MyStorageConnectionString=$STORAGE_CONNECTION_STRING \ 33 | MyCosmosDBConnectionString=$COSMOSDB_CONNECTION_STRING 34 | 35 | echo "Done" 36 | -------------------------------------------------------------------------------- /scripts/upload-blob-sas-token.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import ntpath 4 | import json 5 | import requests 6 | 7 | _AZFUNC_API_KEY="AZURE_FUNCTION_KEY: ex. aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 8 | _AZFUNC_API_URL="AZURE_FUNCTION_ENDPOINT: ex. 
https://.azurewebsites.net/api/" 9 | 10 | if __name__ == '__main__': 11 | 12 | file_path = "/tmp/test.jpg" 13 | content_type = "image/jpeg" 14 | container_name = "functiontest" 15 | 16 | file_name = ntpath.basename(file_path) 17 | 18 | ### Getting SAS token for uploading files to Azure Blob Storage 19 | payload = { 20 | "permission": "awl", 21 | "container": container_name, 22 | "blobname": file_name 23 | } 24 | r = requests.post(_AZFUNC_API_URL, 25 | headers = { 26 | "Content-Type" : "application/json; charset=UTF-8", 27 | "x-functions-key": _AZFUNC_API_KEY 28 | }, 29 | data=json.dumps(payload) 30 | ) 31 | if r.status_code != 200: 32 | print(f"Getting SAS token request result: status code={r.status_code}") 33 | sys.exit(1) 34 | 35 | content_dict = json.loads(r.content.decode()) 36 | url = content_dict['url'] 37 | 38 | ### Uploading files to Azure Blob Storage 39 | with open(file_path , 'rb') as filehandle: 40 | r = requests.put(url, 41 | data=filehandle, 42 | headers={ 43 | 'Content-Type': content_type, 44 | 'x-ms-blob-type': 'BlockBlob' 45 | }, 46 | params={ 47 | 'file': file_path 48 | } 49 | ) 50 | print(f"Uploading request result: status code={r.status_code}") 51 | -------------------------------------------------------------------------------- /v1functions/blob-sas-token-generator/README.md: -------------------------------------------------------------------------------- 1 | # blob-sas-token-generator 2 | An HTTP trigger Azure Function that returns a SAS token for Azure Storage for the specified container and blob name. You can also specify access permissions for the container/blob name and optionally its token time-to-live period. The SAS token expires in an hour by default. 
3 | 4 | ## HTTP Request body format 5 | HTTP Request body must include the following parameters: 6 | ``` 7 | { 8 | 'permission': '', 9 | 'container': '', 10 | 'blobname': '' 11 | 'ttl': '' 12 | } 13 | ``` 14 | 15 | The following values can be used for permissions: 16 | "a" (Add), "r" (Read), "w" (Write), "d" (Delete), "l" (List) 17 | Concatenate multiple permissions, such as "rwa" = Read, Write, Add 18 | 19 | Sample Request Body 20 | ``` 21 | { 22 | 'permission': "rl", 23 | 'container': "functions", 24 | 'blobname': "sample.png" 25 | 'ttl': 2 26 | } 27 | ``` 28 | 29 | ## Response body format 30 | HTTP response body format is: 31 | ``` 32 | { 33 | 'token': '', 34 | 'url' : '' 35 | } 36 | ``` 37 | 38 | Sample Response Body 39 | ``` 40 | {'url': 'https://testfunction.blob.core.windows.net/functiontest/yokawasa.png?sig=sXBjML1Fpk9UnTBtajo05ZTFSk0LWFGvARZ6WlVcAog%3D&srt=o&ss=b&spr=https&sp=rl&sv=2016-05-31&se=2017-07-01T00%3A21%3A38Z&st=2017-07-01T23%3A16%3A38Z', 'token': 'sig=sXBjML1Fpk9UnTBtajo05ZTFSk0LWFGvARZ6WlVcAog%3D&srt=o&ss=b&spr=https&sp=rl&sv=2016-05-31&se=2017-07-01T00%3A21%3A38Z&st=2017-07-01T23%3A16%3A38Z'} 41 | ``` 42 | 43 | ## Test Command 44 | 45 | ``` 46 | #!/bin/sh 47 | 48 | api_url="AZURE_FUNCTION_ENDPOINT: ex. https://.azurewebsites.net/api/" 49 | api_key="AZURE_FUNCTION_KEY: ex. aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 50 | 51 | echo "Sending HTTP POST Request............." 
52 | curl -s\ 53 | -H "Content-Type: application/json; charset=UTF-8"\ 54 | -H "x-functions-key: ${api_key}"\ 55 | -XPOST ${api_url} -d'{ 56 | "permission": "rl", 57 | "container": "functiontest", 58 | "blobname": "sample.png", 59 | "ttl": 2 60 | }' 61 | echo "" 62 | ``` 63 | -------------------------------------------------------------------------------- /v1functions/blob-sas-token-generator/function/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "authLevel": "function", 5 | "type": "httpTrigger", 6 | "direction": "in", 7 | "name": "req" 8 | }, 9 | { 10 | "type": "http", 11 | "direction": "out", 12 | "name": "res" 13 | } 14 | ], 15 | "disabled": false 16 | } 17 | -------------------------------------------------------------------------------- /v1functions/blob-sas-token-generator/function/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | An HTTP trigger Azure Function that returns a SAS token for Azure Storage for the specified container and blob name. 5 | You can also specify access permissions for the container/blob name and optionally its token time-to-live period. 6 | The SAS token expires in an hour by default. 
7 | 8 | [HTTP Request body format] 9 | HTTP Request body must include the following parameters: 10 | { 11 | 'permission': '', 12 | 'container': '', 13 | 'blobname': '' 14 | 'ttl': '' 15 | } 16 | 17 | The following values can be used for permissions: 18 | "a" (Add), "r" (Read), "w" (Write), "d" (Delete), "l" (List) 19 | Concatenate multiple permissions, such as "rwa" = Read, Write, Add 20 | 21 | Sample Request Body 22 | { 23 | 'permission': "rl", 24 | 'container': "functions", 25 | 'blobname': "yokawasa.png" 26 | } 27 | 28 | [Response body format] 29 | HTTP response body format is: 30 | { 31 | 'token': '', 32 | 'url' : '' 33 | } 34 | 35 | Sample Response Body 36 | {'url': 'https://testfunction.blob.core.windows.net/functiontest/yokawasa.png?sig=sXBjML1Fpk9UnTBtajo05ZTFSk0LWFGvARZ6WlVcAog%3D&srt=o&ss=b&spr=https&sp=rl&sv=2016-05-31&se=2017-07-01T00%3A21%3A38Z&st=2017-07-01T23%3A16%3A38Z', 'token': 'sig=sXBjML1Fpk9UnTBtajo05ZTFSk0LWFGvARZ6WlVcAog%3D&srt=o&ss=b&spr=https&sp=rl&sv=2016-05-31&se=2017-07-01T00%3A21%3A38Z&st=2017-07-01T23%3A16%3A38Z'} 37 | 38 | """ 39 | import sys 40 | import os 41 | import json 42 | import base64 43 | import hmac 44 | import hashlib 45 | import urllib 46 | from datetime import datetime, timedelta 47 | 48 | _ALLOWED_HTTP_METHOD = "POST" 49 | _AZURE_STORAGE_API_VERSION = "2016-05-31" 50 | _AZURE_STORAGE_CONN_STRING_ENV_NAME = "AZUREWEBJOBSSTORAGE" 51 | _AZURE_FUNCTION_HTTP_INPUT_ENV_NAME = "req" 52 | _AZURE_FUNCTION_HTTP_OUTPUT_ENV_NAME = "res" 53 | _SAS_TOKEN_DEFAULT_TTL = 1 54 | 55 | def write_http_response(status, body_dict): 56 | return_dict = { 57 | "status": status, 58 | "body": json.dumps(body_dict), 59 | "headers": { 60 | "Content-Type": "application/json" 61 | } 62 | } 63 | output = open(os.environ[_AZURE_FUNCTION_HTTP_OUTPUT_ENV_NAME], 'w') 64 | output.write(json.dumps(return_dict)) 65 | 66 | def generate_sas_token (storage_account, storage_key, permission, token_ttl, container_name, blob_name = None ): 67 | sp = permission 68 | # 
Set start time to five minutes ago to avoid clock skew. 69 | st= str((datetime.utcnow() - timedelta(minutes=5) ).strftime("%Y-%m-%dT%H:%M:%SZ")) 70 | se= str((datetime.utcnow() + timedelta(hours=token_ttl)).strftime("%Y-%m-%dT%H:%M:%SZ")) 71 | srt = 'o' if blob_name else 'co' 72 | 73 | # Construct input value 74 | inputvalue = "{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}\n".format( 75 | storage_account, # 0. account name 76 | sp, # 1. signed permission (sp) 77 | 'b', # 2. signed service (ss) 78 | srt, # 3. signed resource type (srt) 79 | st, # 4. signed start time (st) 80 | se, # 5. signed expire time (se) 81 | '', # 6. signed ip 82 | 'https', # 7. signed protocol 83 | _AZURE_STORAGE_API_VERSION) # 8. signed version 84 | 85 | # Create base64 encoded signature 86 | hash =hmac.new(base64.b64decode(storage_key),inputvalue,hashlib.sha256).digest() 87 | sig = base64.b64encode(hash) 88 | 89 | querystring = { 90 | 'sv': _AZURE_STORAGE_API_VERSION, 91 | 'ss': 'b', 92 | 'srt': srt, 93 | 'sp': sp, 94 | 'se': se, 95 | 'st': st, 96 | 'spr': 'https', 97 | 'sig': sig, 98 | } 99 | sastoken = urllib.urlencode(querystring) 100 | 101 | sas_url = None 102 | if blob_name: 103 | sas_url = "https://{0}.blob.core.windows.net/{1}/{2}?{3}".format( 104 | storage_account, 105 | container_name, 106 | blob_name, 107 | sastoken) 108 | else: 109 | sas_url = "https://{0}.blob.core.windows.net/{1}?{2}".format( 110 | storage_account, 111 | container_name, 112 | sastoken) 113 | 114 | return { 115 | 'token': sastoken, 116 | 'url' : sas_url 117 | } 118 | 119 | # Get HTTP Method 120 | env = os.environ 121 | DEFAULT_METHOD = "GET" 122 | http_method = env['REQ_METHOD'] if env.has_key('REQ_METHOD') else DEFAULT_METHOD 123 | print("http_method={}".format(http_method)) 124 | 125 | # Get Azure Storage Connection String 126 | storage_account = None 127 | storage_key = None 128 | connString = env[_AZURE_STORAGE_CONN_STRING_ENV_NAME] 129 | print("connString={}".format(connString)) 130 | ll = 
connString.split(';') 131 | for l in ll: 132 | ss = l.split('=',1) 133 | if len(ss) != 2: 134 | continue 135 | if ss[0] == 'AccountName': 136 | storage_account = ss[1] 137 | if ss[0] == 'AccountKey': 138 | storage_key = ss[1] 139 | if not storage_account or not storage_key: 140 | write_http_response(400, 141 | { 'message': 'Function configuration error: NO Azure Storage connection string found!' } 142 | ) 143 | sys.exit(0) 144 | 145 | # Check HTTP Mehtod 146 | if http_method.lower() !=_ALLOWED_HTTP_METHOD.lower(): 147 | write_http_response(405, 148 | { 'message': 'Only POST HTTP Method is allowed' } 149 | ) 150 | sys.exit(0) 151 | 152 | # Get Request Parameters: permission, container, blobname (optional) 153 | req_body_s = open(env[_AZURE_FUNCTION_HTTP_INPUT_ENV_NAME], "r").read() 154 | print("REQUEST BODY => {}".format(req_body_s)) 155 | req_body_dict = json.loads(req_body_s) 156 | if "permission" not in req_body_s or "container" not in req_body_s: 157 | write_http_response(400, 158 | { 'message': 'Permission and container parameters must be included in HTTP request body' } 159 | ) 160 | sys.exit(0) 161 | 162 | permission = req_body_dict['permission'] 163 | container_name = req_body_dict['container'] 164 | blob_name = None 165 | if "blobname" in req_body_dict: 166 | blob_name = req_body_dict['blobname'] 167 | token_ttl = _SAS_TOKEN_DEFAULT_TTL 168 | if "ttl" in req_body_dict: 169 | token_ttl = int(req_body_dict['ttl']) 170 | if token_ttl < 1: 171 | write_http_response(400, 172 | { 'message': 'Token ttl must be digit and more than 0' } 173 | ) 174 | sys.exit(0) 175 | 176 | # Generate SAS Token 177 | token_dict = generate_sas_token(storage_account, storage_key, permission, token_ttl, container_name, blob_name ) 178 | print("Generated Token token={} url={}".format(token_dict['token'], token_dict['url'])) 179 | 180 | # Write HTTP Response 181 | write_http_response(200, token_dict) 182 | 183 | sys.exit(0) 184 | 
-------------------------------------------------------------------------------- /v1functions/blob-sas-token-generator/send-https-request.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | api_url="AZURE_FUNCTION_ENDPOINT: ex. https://.azurewebsites.net/api/" 4 | api_key="AZURE_FUNCTION_KEY: ex. aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 5 | 6 | echo "Sending HTTP POST Request............." 7 | curl -s\ 8 | -H "Content-Type: application/json; charset=UTF-8"\ 9 | -H "x-functions-key: ${api_key}"\ 10 | -XPOST ${api_url} -d'{ 11 | "permission": "rl", 12 | "container": "functiontest", 13 | "blobname": "sample.png", 14 | "ttl": 2 15 | }' 16 | echo "" 17 | -------------------------------------------------------------------------------- /v1functions/blob-sas-token-generator/t.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | api_url="AZURE_FUNCTION_ENDPOINT: ex. https://.azurewebsites.net/api/" 4 | api_key="AZURE_FUNCTION_KEY: ex. aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 5 | 6 | echo "Sending Bad Request #1: Invalid HTTP Method............." 7 | curl -s\ 8 | -H "Content-Type: application/json; charset=UTF-8"\ 9 | -H "x-functions-key: ${api_key}"\ 10 | -XGET ${api_url} 11 | echo "" 12 | 13 | echo "Sending Bad Request #1: Invalid Request Body............." 14 | curl -s\ 15 | -H "Content-Type: application/json; charset=UTF-8"\ 16 | -H "x-functions-key: ${api_key}"\ 17 | -XPOST ${api_url} -d'{ 18 | "permission": "rl" 19 | }' 20 | echo "" 21 | 22 | echo "Sending Bad Request #3: Invalid TTL............." 
23 | curl -s\ 24 | -H "Content-Type: application/json; charset=UTF-8"\ 25 | -H "x-functions-key: ${api_key}"\ 26 | -XPOST ${api_url} -d'{ 27 | "permission": "rl", 28 | "container": "functiontest", 29 | "blobname": "sample.png", 30 | "ttl": -1 31 | }' 32 | echo "" 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /v1functions/blob-trigger-blob-in-out-bindings/README.md: -------------------------------------------------------------------------------- 1 | # blob-trigger-blob-in-out-bindings 2 | Azure Functions Blob Trigger Python Sample that simply read file from Azure Blob Storage and write an output file to Azure Blob Storage using Blob Storage input and output bindings respectively 3 | 4 | ## Prerequisites 5 | - Azure Functions Account 6 | - General-purpose storage account (Blob storage triggers require a general-purpose storage account) 7 | 8 | ## Trigger and Input/Output Binding (function.json) 9 | 10 | ``` 11 | { 12 | "bindings": [ 13 | { 14 | "name": "blobTriggerTest", 15 | "type": "blobTrigger", 16 | "direction": "in", 17 | "path": "inputcontainer4funcs/{blobname}.{blobextension}", 18 | "connection": "yourstorageaccount_STORAGE" 19 | }, 20 | { 21 | "type": "blob", 22 | "name": "inputBlob", 23 | "path": "inputcontainer4funcs/{blobname}.{blobextension}", 24 | "direction": "in", 25 | "connection": "yourstorageaccount_STORAGE" 26 | }, 27 | { 28 | "type": "blob", 29 | "name": "outputBlob", 30 | "path": "outputcontainer4funcs/{blobname}-encoded.{blobextension}", 31 | "direction": "out", 32 | "connection": "yourstorageaccount_STORAGE" 33 | } 34 | ], 35 | "disabled": false 36 | } 37 | ``` 38 | 39 | You can specify an exact file name, for example, `clear.txt if you want Blob trigger to start the function only when the file is uploaded like this: 40 | ``` 41 | { 42 | "type": "blob", 43 | "name": "inputBlob", 44 | "path": "inputcontainer4funcs/clear.txt", 45 | "direction": "in", 46 | "connection": 
"yourstorageaccount_STORAGE" 47 | }, 48 | ``` 49 | 50 | ## How the function works? 51 | 52 | Here is how the functions works when you upload a blob file named `sample.txt` 53 | 1. The function is triggered to start when a new or updated blob is detected in a container named `inputcontainer4funcs` 54 | 2. The function reads the detected blob file using Blob storage input binding, and assigns the content into clear_text variable 55 | 3. The function encrypts the content in clear_text variable using ROT13 encyrption and assign the encrypted content into encrypted_text variable 56 | 4. The function stores the encrypted content into a file named `{blobname}-encoded.{blobextension}` in a container named `outputcontainer4funcs` 57 | 58 | [NOTE] In python code, you can NOT get the blob name in the Blob Trigger function. Use another mechanism to trigger the blob processing, such as a queue message that contains the blob name. See the blob input bindings example in [this page](https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-storage-blob#input---example) for more detail. 59 | 60 | Seel Also a sample where you can get the blob file name using queue trigger - [queue-trigger-blob-in-bindings](../queue-trigger-blob-in-binding/) 61 | 62 | 63 | ## LINKS 64 | - [Azure Blob storage bindings for Azure Functions](https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-storage-blob) -------------------------------------------------------------------------------- /v1functions/blob-trigger-blob-in-out-bindings/clear.txt: -------------------------------------------------------------------------------- 1 | The Zen of Python, by Tim Peters 2 | 3 | Beautiful is better than ugly. 4 | Explicit is better than implicit. 5 | Simple is better than complex. 6 | Complex is better than complicated. 7 | Flat is better than nested. 8 | Sparse is better than dense. 9 | Readability counts. 10 | Special cases aren't special enough to break the rules. 
11 | Although practicality beats purity. 12 | Errors should never pass silently. 13 | Unless explicitly silenced. 14 | In the face of ambiguity, refuse the temptation to guess. 15 | There should be one-- and preferably only one --obvious way to do it. 16 | Although that way may not be obvious at first unless you're Dutch. 17 | Now is better than never. 18 | Although never is often better than *right* now. 19 | If the implementation is hard to explain, it's a bad idea. 20 | If the implementation is easy to explain, it may be a good idea. 21 | Namespaces are one honking great idea -- let's do more of those! 22 | -------------------------------------------------------------------------------- /v1functions/blob-trigger-blob-in-out-bindings/clear.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/v1functions/blob-trigger-blob-in-out-bindings/clear.zip -------------------------------------------------------------------------------- /v1functions/blob-trigger-blob-in-out-bindings/function/function-sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "name": "blobTriggerTest", 5 | "type": "blobTrigger", 6 | "direction": "in", 7 | "path": "inputcontainer4funcs/{blobname}.txt", 8 | "connection": "functionsstore01_STORAGE" 9 | }, 10 | { 11 | "type": "blob", 12 | "name": "inputBlob", 13 | "path": "inputcontainer4funcs/{blobname}.txt", 14 | "direction": "in", 15 | "connection": "functionsstore01_STORAGE" 16 | }, 17 | { 18 | "type": "blob", 19 | "name": "outputBlob", 20 | "path": "outputcontainer4funcs/{blobname}-encrypted.txt", 21 | "direction": "out", 22 | "connection": "functionsstore01_STORAGE" 23 | } 24 | ], 25 | "disabled": false 26 | } 27 | -------------------------------------------------------------------------------- 
# -*- coding: utf-8 -*-

"""
Azure Functions Blob Trigger Python Sample
- Reads the triggering blob via the 'inputBlob' input binding, applies a
  ROT13 transform, and writes the result via the 'outputBlob' output binding.

The Functions host materializes each binding as a local temp file and exposes
its path through an environment variable named after the binding.
"""

import codecs
import os

# Read input file whose local path is given by the ENV variable 'inputBlob'
with open(os.environ['inputBlob'], 'r') as input_file:
    clear_text = input_file.read()

# Apply ROT13. codecs.encode(..., 'rot_13') works on both Python 2 and 3;
# the original str.decode('rot13') is Python-2-only and fails on Python 3.
encrypted_text = codecs.encode(clear_text, 'rot_13')

# Write the transformed text to the path given by the ENV variable 'outputBlob'
with open(os.environ['outputBlob'], 'w') as output_file:
    output_file.write(encrypted_text)
# -*- coding: utf-8 -*-

"""
Azure Functions Blob Trigger Python Sample
- Reading Zip archived files from Azure Blob Storage

The 'inputBlob' binding materializes the triggering blob as a local temp
file whose path is exposed through the 'inputBlob' environment variable.
"""

import os
import zipfile

# Local path of the temp file created for the triggering zip blob
zippath = os.environ['inputBlob']
# FIX: the original printed an undefined name 'zipfilepath' (NameError);
# the variable assigned above is 'zippath'.
print("Zip File Path: {}".format(zippath))

# Read entries in the given zip file, assuming the files are clear text
with zipfile.ZipFile(zippath) as z:
    for filename in z.namelist():
        print("filename:{} in zipfile:{}".format(filename, zippath))
        # FIX: directory entries inside a zip end with '/'; the original
        # os.path.isdir(filename) checked the *local* filesystem, which is
        # not where the archive members live.
        if not filename.endswith('/'):
            # Read the member inside the zip archive line by line
            with z.open(filename) as f:
                for line in f:
                    print(line)
The function simply read & dump documets which are added to or changed in Azure Cosmos DB by leveraging CosmosDB input binding 3 | 4 | 5 | 6 | ## Configuration - function.json 7 | 8 | ``` 9 | { 10 | "bindings": [ 11 | { 12 | "type": "cosmosDBTrigger", 13 | "name": "triggeredCosmosdb", 14 | "connectionStringSetting": "yoichikademo1_DOCUMENTDB", 15 | "databaseName": "mydb", 16 | "collectionName": "mycontent", 17 | "leaseCollectionName": "mycontent_leaves", 18 | "createLeaseCollectionIfNotExists": true, 19 | "direction": "in" 20 | }, 21 | { 22 | "type": "documentDB", 23 | "name": "inputCosmosdb", 24 | "databaseName": "mydb", 25 | "collectionName": "mycontent", 26 | "connection": "yoichikademo1_DOCUMENTDB", 27 | "direction": "in" 28 | } 29 | ], 30 | "disabled": false 31 | } 32 | ``` 33 | 34 | [NOTE] Currently all documents in the collection are returned. 35 | -------------------------------------------------------------------------------- /v1functions/cosmosdb-trigger-cosmosdb-in-binding/function/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "type": "cosmosDBTrigger", 5 | "name": "triggeredCosmosdb", 6 | "connectionStringSetting": "yoichikademo1_DOCUMENTDB", 7 | "databaseName": "mydb", 8 | "collectionName": "mycontent", 9 | "leaseCollectionName": "mycontent_leaves", 10 | "createLeaseCollectionIfNotExists": true, 11 | "direction": "in" 12 | }, 13 | { 14 | "type": "documentDB", 15 | "name": "inputCosmosdb", 16 | "databaseName": "mydb", 17 | "collectionName": "mycontent", 18 | "connection": "yoichikademo1_DOCUMENTDB", 19 | "direction": "in" 20 | } 21 | ], 22 | "disabled": false 23 | } 24 | -------------------------------------------------------------------------------- /v1functions/cosmosdb-trigger-cosmosdb-in-binding/function/run.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | # Read Cosmosdb document given from 
ENV variable named 'inputCosmosdb' - Name for Input CosmosDB binding 5 | inputCosmosdb = open(os.environ['inputCosmosdb'],'r').read() 6 | print(inputCosmosdb) 7 | 8 | # Deserialize inputCosmosdb (string) into JSON objects 9 | docObjects = json.loads(inputCosmosdb) 10 | 11 | # Dump documents 12 | print("Document Count={}".format(len(docObjects))) 13 | for doc in docObjects: 14 | print(doc) 15 | -------------------------------------------------------------------------------- /v1functions/eventhub-trigger-table-out-bindings/README.md: -------------------------------------------------------------------------------- 1 | # eventhub-trigger-table-out-bindings 2 | Azure Functions EventHub Trigger Python Sample that read message (device info) in EventHub that sent from sender and write an output record to Azure Table Storage using Azure Table bindings 3 | 4 | | Trigger | In/Out Bindings | 5 | ------------ | ----------- | 6 | | EventHub Trigger | output:Table | 7 | 8 | 9 | ## Pre-requisites 10 | 11 | * **Azure EventHub Account**: You need an EventHub account to which you send an event which which Azure functions triggers the functions. 12 | * **Azure Storage Account (General Purpose Type)**: You need an Azure Storage account as the function read device info in EventHub which is originally sent from sender and store them into your Table. 13 | 14 | ## Bindings Configuration 15 | 16 | You need to configure 2 kinds of bindings: (1) EventHub Trigger (2) Azure Table output Binding. 
You can configure them either by directly editing function.json file or via Azure Functions' "Function Apps - Functions - Integrate" UI in Azure Portal 17 | 18 | ``` 19 | { 20 | "bindings": [ 21 | { 22 | "type": "eventHubTrigger", 23 | "name": "myEventHubMessage", 24 | "path": "", 25 | "consumerGroup": "$Default", 26 | "connection": "__EVENTHUB", 27 | "cardinality": "one", 28 | "direction": "in" 29 | }, 30 | { 31 | "type": "table", 32 | "name": "outputTable", 33 | "tableName": "", 34 | "connection": "_STORAGE", 35 | "direction": "out" 36 | } 37 | ], 38 | "disabled": false 39 | } 40 | ``` 41 | 42 | 43 | 44 | ## Test Command 45 | 46 | send-event.py is a test command that allows you to send device info to your Eventhub 47 | 48 | ``` 49 | import json 50 | from azure.servicebus import ServiceBusService 51 | 52 | eventhub_namespace="" 53 | entity= "" 54 | sasKeyName = "" 55 | sasKeyValue= "" 56 | 57 | sbs = ServiceBusService(eventhub_namespace, shared_access_key_name=sasKeyName, shared_access_key_value=sasKeyValue) 58 | message = { 59 | "deviceId": "myDevice001", 60 | "temperature": "13.5" 61 | } 62 | sbs.send_event(entity, json.dumps(message)) 63 | ``` 64 | 65 | -------------------------------------------------------------------------------- /v1functions/eventhub-trigger-table-out-bindings/function/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "type": "eventHubTrigger", 5 | "name": "myEventHubMessage", 6 | "path": "", 7 | "consumerGroup": "$Default", 8 | "connection": "__EVENTHUB", 9 | "cardinality": "one", 10 | "direction": "in" 11 | }, 12 | { 13 | "type": "table", 14 | "name": "outputTable", 15 | "tableName": "", 16 | "connection": "_STORAGE", 17 | "direction": "out" 18 | } 19 | ], 20 | "disabled": false 21 | } 22 | -------------------------------------------------------------------------------- /v1functions/eventhub-trigger-table-out-bindings/function/run.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | import sys 5 | import json 6 | import uuid 7 | 8 | """ 9 | Expected Receiving Body Message: 10 | { 11 | "deviceId": "myDevice0001", 12 | "temperature": "10.1" 13 | } 14 | [note] 15 | Use "deviceId" as PartitionKey for Azure table to write 16 | 17 | Expected Function's Trigger Configuration: 18 | - 'trigger': AzureEventHub 19 | - 'Event hub cardinality': 'One' 20 | 21 | Expected Function's Output Configuration: 22 | - 'output': Azure Table Storage 23 | - 'Table parameter name: 'outputTable 24 | 25 | """ 26 | 27 | # Read the EventHub Message 28 | receivedBody = json.loads(open(os.environ['myEventHubMessage']).read()) 29 | print('Received body:', receivedBody) 30 | # -> ('received object:', {u'deviceId': u'myDevice0001', u'temperature': u'10.1'}) 31 | if not 'deviceId' in receivedBody or not 'temperature' in receivedBody: 32 | print("Skip: invalid eventHub body!") 33 | sys.exit(0) 34 | 35 | ## Device ID 36 | recordId = str(uuid.uuid4()) 37 | 38 | outdoc= { 39 | "PartitionKey": receivedBody['deviceId'], 40 | "RowKey": recordId, 41 | "temperature": receivedBody['temperature'] 42 | } 43 | # Writing to Azure Table Storage (Table parameter name: outputTable) 44 | print('Writing data to Azure Table:', outdoc) 45 | with open(os.environ['outputTable'], 'w') as f: 46 | json.dump(outdoc,f) 47 | -------------------------------------------------------------------------------- /v1functions/eventhub-trigger-table-out-bindings/send-event.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # install pre-requisite package 4 | # pip install azure-servicebus 5 | # 6 | 7 | import json 8 | from azure.servicebus import ServiceBusService 9 | 10 | eventhub_namespace="" 11 | entity= "" 12 | sasKeyName = "" 13 | sasKeyValue= "" 14 | 15 | sbs = ServiceBusService(eventhub_namespace, 
# -*- coding: utf-8 -*-

"""
Azure Functions HTTP Trigger Python Sample
- Get and dump HTTPS request info that the trigger receives

Special Thanks to anthonyeden for great Python HTTP example:
https://github.com/anthonyeden/Azure-Functions-Python-HTTP-Example

Supports both Python 2 and 3.X
"""

import os
import json

# Fallback HTTP method when the host does not expose REQ_METHOD
_AZURE_FUNCTION_DEFAULT_METHOD = "GET"
# Names of the HTTP input/output bindings (must match function.json)
_AZURE_FUNCTION_HTTP_INPUT_ENV_NAME = "req"
_AZURE_FUNCTION_HTTP_OUTPUT_ENV_NAME = "res"
# Request metadata env vars exposed by the host are prefixed with REQ_
_REQ_PREFIX = "REQ_"

def write_http_response(status, body_dict):
    # Serialize a JSON HTTP response and write it to the output-binding
    # file whose path is held in the 'res' environment variable.
    return_dict = {
        "status": status,
        "body": json.dumps(body_dict),
        "headers": {
            "Content-Type": "application/json"
        }
    }
    output = open(os.environ[_AZURE_FUNCTION_HTTP_OUTPUT_ENV_NAME], 'w')
    output.write(json.dumps(return_dict))


env = os.environ

# Get HTTP METHOD
http_method = env['REQ_METHOD'] if 'REQ_METHOD' in env else _AZURE_FUNCTION_DEFAULT_METHOD
print("HTTP METHOD => {}".format(http_method))

# Get QUERY STRING (parsed out of the original request URL header)
req_url = env['REQ_HEADERS_X-ORIGINAL-URL'] if 'REQ_HEADERS_X-ORIGINAL-URL' in env else ''
urlparts =req_url.split('?')
query_string = urlparts[1] if len(urlparts) == 2 else ''
print("QUERY STRING => {}".format(query_string))

# POST requests carry a body file whose path is in the 'req' env variable
if http_method.lower() == 'post':
    request_body = open(env[_AZURE_FUNCTION_HTTP_INPUT_ENV_NAME], "r").read()
    print("REQUEST BODY => {}".format(request_body))

# Echo back every REQ_* environment variable as the JSON response body
res_body = {}
print("Dump ENVIRONMENT VARIABLES:")
for k in env:
    print("ENV: {0} => {1}".format(k, env[k]))
    if (k.startswith(_REQ_PREFIX)):
        res_body[k] = env[k]

write_http_response(200, res_body)
13 | curl -s\ 14 | -H "Content-Type: application/json; charset=UTF-8"\ 15 | -H "x-functions-key: ${api_key}"\ 16 | -XPOST ${api_url} -d'{ 17 | "id: "00000121", 18 | "name": "Yoichi Kawasaki" 19 | }' 20 | -------------------------------------------------------------------------------- /v1functions/proxies-simple-condition-matches/README.md: -------------------------------------------------------------------------------- 1 | # proxies-simple-condition-matches 2 | Azure Functions Python Sample that re-write dynamic and static page url using Azure Functions Proxies 3 | 4 | | Trigger | In/Out Bindings | 5 | ------------ | ----------- | 6 | | HTTP Trigger | output:HTTP | 7 | 8 | 9 | ## Proxies Configuration 10 | ``` 11 | { 12 | "$schema": "http://json.schemastore.org/proxies", 13 | "proxies": { 14 | "url-rewrite-static": { 15 | "matchCondition": { 16 | "route": "/static/{page}", 17 | "methods": [ 18 | "GET" 19 | ] 20 | }, 21 | "backendUri": "https://.blob.core.windows.net/staticpage/{page}" 22 | }, 23 | "url-rewrite-dynamic": { 24 | "matchCondition": { 25 | "route": "/content/{contentid}" 26 | }, 27 | "backendUri": "https://.azurewebsites.net/api/?contentid={contentid}" 28 | } 29 | } 30 | } 31 | ``` 32 | 33 | 34 | ## Sample Access 35 | 36 | For static access 37 | ``` 38 | https://.azurewebsites.net/static/lasvegas.html 39 | ``` 40 | ![](https://github.com/yokawasa/azure-functions-python-samples/raw/master/proxies-simple-condition-matches/img/static-page-result.png) 41 | 42 | For dynamic access 43 | ``` 44 | https://.azurewebsites.net/content/{contentid} 45 | ``` 46 | ![](https://github.com/yokawasa/azure-functions-python-samples/raw/master/proxies-simple-condition-matches/img/dynamic-page-result.png) 47 | 48 | 49 | ## LINKS 50 | - [Work with Azure Functions Proxies](https://docs.microsoft.com/en-us/azure/azure-functions/functions-proxies) 51 | -------------------------------------------------------------------------------- 
# -*- coding: utf-8 -*-

"""
Azure Functions HTTP Trigger Python Sample (Proxies backend)
- Extracts 'contentid' from the request query string and echoes it back
  as a JSON response body.

Works on both Python 2 and 3: the original used Python-2-only
dict.has_key() and print statements, which fail on Python 3. This matches
the sibling http-trigger-dump-request sample, which supports both.
"""

import sys
import os
import json

# Fallback HTTP method when the host does not expose REQ_METHOD
_AZURE_FUNCTION_DEFAULT_METHOD = "GET"
# Names of the HTTP input/output bindings (must match function.json)
_AZURE_FUNCTION_HTTP_INPUT_ENV_NAME = "req"
_AZURE_FUNCTION_HTTP_OUTPUT_ENV_NAME = "res"

def write_http_response(status, body_dict):
    """Serialize a JSON HTTP response to the 'res' output-binding file."""
    return_dict = {
        "status": status,
        "body": json.dumps(body_dict),
        "headers": {
            "Content-Type": "application/json"
        }
    }
    output = open(os.environ[_AZURE_FUNCTION_HTTP_OUTPUT_ENV_NAME], 'w')
    output.write(json.dumps(return_dict))

def get_qs_value(keyname, querystring):
    """Return the value for 'keyname' in a raw query string, or None.

    Key comparison is case-insensitive; if the key appears multiple times
    the last occurrence wins (same as the original implementation).
    """
    v = None
    kvsets = querystring.split('&')
    for kvset in kvsets:
        items = kvset.split('=')
        if len(items) == 2 and items[0].lower() == keyname:
            v = items[1]
    return v

env = os.environ

# Get HTTP METHOD ('in' replaces the Python-2-only has_key())
http_method = env['REQ_METHOD'] if 'REQ_METHOD' in env else _AZURE_FUNCTION_DEFAULT_METHOD
print("HTTP METHOD => {}".format(http_method))

# Get QUERY STRING from the original request URL header
req_url = env['REQ_HEADERS_X-ORIGINAL-URL'] if 'REQ_HEADERS_X-ORIGINAL-URL' in env else ''
urlparts = req_url.split('?')
query_string = urlparts[1] if len(urlparts) == 2 else ''
print("QUERY STRING => {}".format(query_string))

content_id = get_qs_value('contentid', query_string)
print("CONTENT ID => {}".format(content_id))

res_body = {}
res_body['contentid'] = content_id

write_http_response(200, res_body)
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Las Vegas image 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /v1functions/proxies-simple-condition-matches/static/lasvegas.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/v1functions/proxies-simple-condition-matches/static/lasvegas.png -------------------------------------------------------------------------------- /v1functions/queue-trigger-blob-in-binding/README.md: -------------------------------------------------------------------------------- 1 | # queue-trigger-blob-in-binding 2 | Azure Functions Queue Trigger Python Sample that obtain a blog file name from Queue as a queue message and read a file named the blog file name in Azure Blob Storage using Blob Input Binding| Queue 3 | 4 | ## Prerequisites 5 | - Azure Functions Account 6 | - General-purpose storage account (Blob storage triggers require a general-purpose storage account) 7 | 8 | ## Trigger and Input/Output Binding (function.json) 9 | 10 | ``` 11 | { 12 | "bindings": [ 13 | { 14 | "name": "inputMessage", 15 | "type": "queueTrigger", 16 | "direction": "in", 17 | "queueName": "myqueue4python", 18 | "connection": "yourstorageaccount_STORAGE" 19 | }, 20 | { 21 | "type": "blob", 22 | "name": "inputBlob", 23 | "path": "inputcontainer4funcs/{queueTrigger}", 24 | "connection": "yourstorageaccount_STORAGE", 25 | "direction": "in" 26 | } 27 | ], 28 | "disabled": false 29 | } 30 | ``` 31 | 32 | ## How the function works? 33 | 34 | Here is how the functions works when you add a queue message `sample.txt` in queue named `myqueue4python`. Queue message is supposed to be a blob file name. 35 | 1. The function is triggered to start when a new message is detected in the queue named `myqueue4python`. 36 | 2. 
By reading an environment variable named `inputMessage`, the function gets a file name (full path) in which the blob file name is written. The function get the blob file name (=`sample.txt`) from reading the file. 37 | 3. Using Blob storage input binding, the function reads the blob file named `sample.txt` (path: `inputcontainer4funcs/sample.txt`) and assign the content into clear_text variable 38 | 4. The function encrypts the content in clear_text variable using ROT13 encyrption and assign the encrypted content into encrypted_text variable 39 | 40 | 41 | ## LINKS 42 | - [Azure Blob storage bindings for Azure Functions](https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-storage-blob) -------------------------------------------------------------------------------- /v1functions/queue-trigger-blob-in-binding/function/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "name": "inputMessage", 5 | "type": "queueTrigger", 6 | "direction": "in", 7 | "queueName": "myqueue4python", 8 | "connection": "yourstorageaccount_STORAGE" 9 | }, 10 | { 11 | "type": "blob", 12 | "name": "inputBlob", 13 | "path": "inputcontainer4funcs/{queueTrigger}", 14 | "connection": "yourstorageaccount_STORAGE", 15 | "direction": "in" 16 | } 17 | ], 18 | "disabled": false 19 | } 20 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-blob-in-binding/function/run.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | ######################### 1: READ QUEUE MESSAGE ############################# 4 | blob_name = open(os.environ['inputMessage']).read() 5 | print("Blob file name: '{0}'".format(blob_name)) 6 | 7 | ######################### 2: READ BLOB FILE ################################## 8 | input_file = open(os.environ['inputBlob'], 'r') 9 | clear_text = input_file.read() 10 | 
# -*- coding: utf-8 -*-

"""
Azure Functions Queue Trigger
- Get a document ID from Queue as a queue message, select a document object from Cosmos DB by using the document ID, and finally dump the object
"""

import os
import sys
import json

def functions_process(doc):
    """Process a single Cosmos DB document (currently just dumps it)."""
    print(doc)

def functions_main():
    """Entry point: read the CosmosDB input binding, process first document."""
    print ("Starting the operation...")
    # 'inputDocument' holds the path of a temp file containing the JSON
    # array of documents selected by the CosmosDB input binding's sqlQuery.
    cosmosdb_data = open(os.environ['inputDocument']).read()
    docs = json.loads(cosmosdb_data)
    if len(docs) < 1:
        # FIX: the original called an undefined errorlog() here, which
        # raised NameError instead of reporting the empty result.
        sys.stderr.write("No documents obtained via Azure Function Queue & CosmosDB binding\n")
        sys.exit(0)
    doc = docs[0]

    ## process doc
    print ("Processing document")
    functions_process(doc)

    ## Output results if needed
    print ("The end of operation")

functions_main()
feed.feed.title.encode('utf-8') 24 | for entry in feed[ 'entries' ]: 25 | print "Title: ", entry[ 'title' ].encode('utf-8') 26 | print "URL: ", entry[ 'link' ] 27 | print "Description: ", entry[ 'description' ].encode('utf-8') 28 | print "Date: ", entry[ 'updated' ] 29 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-rssfeed-crawler/setup-site-packages.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | PIP_COMMNAD="pip" 4 | SITE_PACKAGES_DIR="site-packages" 5 | TMP_DIR="t" 6 | cwd=`dirname "$0"` 7 | expr "$0" : "/.*" > /dev/null || cwd=`(cd "$cwd" && pwd)` 8 | 9 | if ! type "$PIP_COMMNAD" > /dev/null; then 10 | echo "$PIP_COMMNAD command doesn't exist! Please install pip here" 11 | exit 0 12 | fi 13 | 14 | cd $cwd 15 | 16 | if [ ! -e $TMP_DIR ] 17 | then 18 | echo "Creating directory $TMP_DIR" 19 | mkdir -p $TMP_DIR 20 | fi 21 | 22 | echo "Getting all packages that needed for this program...." 23 | pip install --install-option="--prefix=$cwd/$TMP_DIR" feedparser 24 | TARGET_DIR=`find $TMP_DIR -name "$SITE_PACKAGES_DIR"` 25 | mv $TARGET_DIR function/ 26 | rm -rf $TMP_DIR 27 | echo "The packages are copied: function/$SITE_PACKAGES_DIR!!" 28 | echo "Please make sure to install $SITE_PACKAGES_DIR along with run.py and function.json" 29 | 30 | echo "done!" 
31 | exit 0 32 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-sendgrid/README.md: -------------------------------------------------------------------------------- 1 | # queue-trigger-sendgrid 2 | Azure Functions Queue Trigger Python Sample that send email by using SendGrid bindings 3 | 4 | | Trigger | In/Out Bindings | 5 | ------------ | ----------- | 6 | | Queue Trigger | output:SendGrid | 7 | 8 | 9 | ## Pre-requisites 10 | 11 | * **SendGrid Account/API Key**: You need an SendGrid account and API Key with which you can use Azure Functions to send customized email programmatically. For more info on SendGrid, please see [How to Send Email Using SendGrid with Azure](https://docs.microsoft.com/en-us/azure/app-service-web/sendgrid-dotnet-how-to-send-email) 12 | * **Azure Storage Account (General Purpose Type)**: You need an Azure Storage account as the function read message info in Azure Queue Storage 13 | 14 | ## Bindings Configuration 15 | 16 | You need to configure 2 kinds of bindings: (1) Queue Trigger (2) SendGrid output Binding. You can configure them either by directly editing function.json file or via Azure Functions' "Function Apps - Functions - Integrate" UI in Azure Portal 17 | 18 | ``` 19 | { 20 | "bindings": [ 21 | { 22 | "type": "queueTrigger", 23 | "name": "", 24 | "direction": "in", 25 | "queueName": "", 26 | "connection": "" 27 | }, 28 | { 29 | "type": "sendGrid", 30 | "name": "", 31 | "from": "", 32 | "apiKey": "", 33 | "direction": "out" 34 | } 35 | ], 36 | "disabled": false 37 | } 38 | ``` 39 | 40 | * note1 - The connection property must contain the name of an app setting that contains a storage connection string. In the Azure portal, the standard editor in the Integrate tab configures this app setting for you when you select a storage account. 
41 | * App Settings - For detail information about how to work with App Service settings, please refer to [How to manage a function app in the Azure portal](https://docs.microsoft.com/en-us/azure/azure-functions/functions-how-to-use-azure-function-app-settings) 42 | 43 | Here is an example configuration for this sample code: 44 | ``` 45 | { 46 | "bindings": [ 47 | { 48 | "name": "inputMessage", 49 | "type": "queueTrigger", 50 | "direction": "in", 51 | "queueName": "sendgrid-queue", 52 | "connection": "azurefunctionsb5d4aebe_STORAGE" 53 | }, 54 | { 55 | "type": "sendGrid", 56 | "name": "outputMessage", 57 | "from": "sender@contoso.com", 58 | "apiKey": "MY_SENDGRID_API_KEY", 59 | "direction": "out" 60 | } 61 | ], 62 | "disabled": false 63 | } 64 | ``` 65 | 66 | ## How to Test 67 | (1) Deploy run.py and functions.json to your function app 68 | (2) Configure binding file - function.json 69 | (3) Add a message to your queue 70 | (4) Check if the function send an email via SendGrid. 71 | 72 | See also [Strategies for testing your code in Azure Functions](https://docs.microsoft.com/en-us/azure/azure-functions/functions-test-a-function) 73 | 74 | 75 | ## Useful Links 76 | * [How to Send Email Using SendGrid with Azure](https://docs.microsoft.com/en-us/azure/app-service-web/sendgrid-dotnet-how-to-send-email) 77 | * [How to manage a function app in the Azure portal](https://docs.microsoft.com/en-us/azure/azure-functions/functions-how-to-use-azure-function-app-settings) 78 | * [Azure Functions SendGrid bindings](https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-sendgrid) 79 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-sendgrid/function/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "type": "queueTrigger", 5 | "name": "", 6 | "direction": "in", 7 | "queueName": "", 8 | "connection": "" 9 | }, 10 | { 11 | 
"type": "sendGrid", 12 | "name": "", 13 | "from": "", 14 | "apiKey": "", 15 | "direction": "out" 16 | } 17 | ], 18 | "disabled": false 19 | } 20 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-sendgrid/function/function.json.example: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "name": "inputMessage", 5 | "type": "queueTrigger", 6 | "direction": "in", 7 | "queueName": "sendgrid-queue", 8 | "connection": "azurefunctionsb5d4aebe_STORAGE" 9 | }, 10 | { 11 | "type": "sendGrid", 12 | "name": "outputMessage", 13 | "from": "sender@contoso.com", 14 | "apiKey": "MY_SENDGRID_API_KEY", 15 | "direction": "out" 16 | } 17 | ], 18 | "disabled": false 19 | } 20 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-sendgrid/function/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 4 | Azure Functions Queue Trigger Python Sample that send email by using SendGrid bindings. 
5 | 6 | SendGrid binding reference: 7 | https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-sendgrid 8 | """ 9 | import os, json 10 | 11 | _AZURE_FUNCTION_QUEUE_INPUT_ENV_NAME = "inputMessage" 12 | _AZURE_FUNCTION_SENDGRID_OUTPUT_ENV_NAME = "outputMessage" 13 | _SENDGRID_EMAIL_TO = "receiver@contoso.com" 14 | _SENDGRID_EMAIL_SUBJECT = "Mail Subject" 15 | 16 | # read the queue message 17 | messageText = open(os.environ[_AZURE_FUNCTION_QUEUE_INPUT_ENV_NAME]).read() 18 | print("Function script processed queue message '{0}'".format(messageText)) 19 | 20 | outmsg={ 21 | "personalizations": [ 22 | { 23 | "to": [{ "email": _SENDGRID_EMAIL_TO }] 24 | } 25 | ], 26 | "subject": _SENDGRID_EMAIL_SUBJECT, 27 | "content": [ 28 | { 29 | "type": 'text/plain', 30 | "value": messageText 31 | } 32 | ] 33 | } 34 | 35 | # Send email using SendGrid (output name: outputMessage) 36 | print('Sending email using SendGrid:', outmsg) 37 | with open(os.environ[_AZURE_FUNCTION_SENDGRID_OUTPUT_ENV_NAME], 'wb') as f: 38 | json.dump(outmsg,f) 39 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-tagging-images/README.md: -------------------------------------------------------------------------------- 1 | # queue-trigger-tagging-images 2 | Azure Functions Queue Trigger Python Sample that tags images stored on Azure Blob Storage by using Cognitive Vision API 3 | -------------------------------------------------------------------------------- /v1functions/queue-trigger-tagging-images/function/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "name": "inputMessage", 5 | "type": "queueTrigger", 6 | "direction": "in", 7 | "queueName": "blobqueue", 8 | "connection": "" 9 | } 10 | ], 11 | "disabled": false 12 | } 13 | -------------------------------------------------------------------------------- 
/v1functions/queue-trigger-tagging-images/function/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | 5 | Azure Functions Queue Trigger Python Sample 6 | - Tagging image files stored on Azure Blob Storage by using Cognitive Vision API 7 | 8 | """ 9 | 10 | import os,sys 11 | import json 12 | from datetime import datetime, timedelta 13 | import httplib, urllib, base64 14 | import base64 15 | import hmac 16 | import hashlib 17 | 18 | STORAGE_ACCOUNT_NAME = '' 19 | STORAGE_ACCOUNT_KEY = '' 20 | CONTAINER_NAME = '' 21 | AZURE_STORAGE_VERSION = "2015-12-11" 22 | COGNITIVE_SUBSCRIPTION_KEY = '' 23 | 24 | ######################### 1: READ QUEUE MESSAGE ############################### 25 | blob_name = open(os.environ['inputMessage']).read() 26 | print "Python script processes blob name: '{0}'".format(blob_name) 27 | 28 | ######################### 2: GET BLOB SAS URL ################################# 29 | st= str(datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")) 30 | se= str((datetime.utcnow() + timedelta(hours=1)).strftime("%Y-%m-%dT%H:%M:%SZ")) 31 | iv = "{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}\n".format( 32 | STORAGE_ACCOUNT_NAME, # 0. account name 33 | 'r', # 1. signed permissions 34 | 'b', # 2. signed service 35 | 'o', # 3. signed resource type 36 | st, # 4. signed start time 37 | se, # 5. signed expire time 38 | '', # 6. signed ip 39 | 'https', # 7. signed protocol 40 | AZURE_STORAGE_VERSION) # 8. 
signed version 41 | 42 | # Create base64 encoded signature 43 | hash =hmac.new(base64.b64decode(STORAGE_ACCOUNT_KEY),iv,hashlib.sha256).digest() 44 | sig = base64.b64encode(hash) 45 | querystring = { 46 | 'sv':AZURE_STORAGE_VERSION,'ss':'b','srt':'o','sp':'r','se':se,'st':st,'spr':'https','sig':sig } 47 | blob_url = "https://{0}.blob.core.windows.net/{1}/{2}?{3}".format( 48 | STORAGE_ACCOUNT_NAME, 49 | CONTAINER_NAME, 50 | blob_name, 51 | urllib.urlencode(querystring) ) 52 | print "Blob SAS URL: " + blob_url 53 | 54 | ##################### 3: COGNITIVE SERIVICE PROCESSING (GET TAGS) ############ 55 | headers = { 56 | 'Content-Type': 'application/json', 57 | 'Ocp-Apim-Subscription-Key': COGNITIVE_SUBSCRIPTION_KEY, 58 | } 59 | params = urllib.urlencode({}) 60 | body = json.dumps( {"url": blob_url}) 61 | r_data = '' 62 | try: 63 | conn = httplib.HTTPSConnection('westus.api.cognitive.microsoft.com') 64 | conn.request("POST", "/vision/v1.0/tag?%s" % params, body, headers) 65 | response = conn.getresponse() 66 | r_data = response.read() 67 | print(r_data) 68 | conn.close() 69 | except Exception as e: 70 | print("[Errno {0}] {1}".format(e.errno, e.strerror)) 71 | 72 | if r_data: 73 | r_jsonobject=json.loads(r_data) 74 | tags = [] 75 | for tag_dict in r_jsonobject['tags']: 76 | tags.append(tag_dict['name']) 77 | outdoc= { 78 | "blob_name": blob_name, 79 | "tags": ','.join(tags) 80 | } 81 | print outdoc 82 | -------------------------------------------------------------------------------- /v1functions/timer-trigger-azuresearch-index-monitoring/README.md: -------------------------------------------------------------------------------- 1 | # timer-trigger-azuresearch-index-monitoring 2 | Azure Functions Timer Trigger Python Sample that get Azure Search index statistics via API and store the results into DocumentDB 3 | -------------------------------------------------------------------------------- /v1functions/timer-trigger-azuresearch-index-monitoring/function/function.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "name": "myTimer", 5 | "type": "timerTrigger", 6 | "direction": "in", 7 | "schedule": "0 */5 * * * *" 8 | }, 9 | { 10 | "type": "documentDB", 11 | "name": "outputDocument", 12 | "databaseName": "functions2docdb", 13 | "collectionName": "indexstats", 14 | "createIfNotExists": true, 15 | "connection": "", 16 | "direction": "out" 17 | } 18 | ], 19 | "disabled": false 20 | } 21 | -------------------------------------------------------------------------------- /v1functions/timer-trigger-azuresearch-index-monitoring/function/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 4 | Azure Functions Timer Trigger Python Sample 5 | - Get Azure Search Index Statistics and store them into DocumentDB 6 | 7 | DocumentDB binding reference: 8 | https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-documentdb 9 | 10 | """ 11 | 12 | import sys, os, datetime, json 13 | import httplib, urllib 14 | 15 | AZURE_SEARCH_SERVICE_NAME='' 16 | AZURE_SEARCH_API_VER='' 17 | AZURE_SEARCH_ADMIN_KEY='' 18 | AZURE_SEARCH_INDEX_NAME='' 19 | CONTENT_TYPE='application/json' 20 | 21 | headers = { 22 | 'api-key': AZURE_SEARCH_ADMIN_KEY, 23 | 'content-type': "application/json" 24 | } 25 | 26 | r_data = '' 27 | try: 28 | conn = httplib.HTTPSConnection('{}.search.windows.net'.format(AZURE_SEARCH_SERVICE_NAME)) 29 | conn.request("GET", 30 | "/indexes/{0}/stats?api-version={1}".format(AZURE_SEARCH_INDEX_NAME, AZURE_SEARCH_API_VER), 31 | '', headers) 32 | response = conn.getresponse() 33 | r_data = response.read() 34 | conn.close() 35 | except Exception as e: 36 | print("[Errno {0}] {1}".format(e.errno, e.strerror)) 37 | 38 | if r_data: 39 | r_jsonobject=json.loads(r_data) 40 | outdoc= { 41 | "doccount": r_jsonobject['documentCount'], 42 | "storagesize": r_jsonobject['storageSize'], 43 | "timestamp": 
str(datetime.datetime.utcnow()) 44 | } 45 | print outdoc 46 | # Writing to DocumentDB (Document parameter name: outputDocument) 47 | with open(os.environ['outputDocument'], 'wb') as f: 48 | json.dump(outdoc,f) 49 | -------------------------------------------------------------------------------- /v2functions/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/azure-functions/python:2.0 2 | 3 | COPY . /home/site/wwwroot 4 | 5 | ENV AzureFunctionsJobHost__Logging__Console__IsEnabled=true 6 | 7 | RUN cd /home/site/wwwroot && \ 8 | pip install -r requirements.txt 9 | -------------------------------------------------------------------------------- /v2functions/VERSION: -------------------------------------------------------------------------------- 1 | v0.0.1 2 | -------------------------------------------------------------------------------- /v2functions/blob-trigger-cosmosdb-out-binding/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import azure.functions as func 4 | import http.client, urllib.parse, base64, json 5 | import requests 6 | 7 | subscription_key = os.environ['ComputerVisionSubscription'] 8 | api_endpoint = os.environ['ComputerVisionApiEndpoint'] 9 | 10 | headers = { 11 | # Request headers. 12 | 'Content-Type': 'application/octet-stream', 13 | 'Ocp-Apim-Subscription-Key': subscription_key, 14 | } 15 | 16 | params = urllib.parse.urlencode({ 17 | # Request parameters. All of them are optional. 
18 | 'visualFeatures': 'Description', 19 | 'language': 'en', 20 | }) 21 | 22 | def main(myblob: func.InputStream, doc: func.Out[func.Document]): 23 | logging.info(f"Python blob trigger function processed blob \n" 24 | f"Name: {myblob.name}\n" 25 | f"Blob Size: {myblob.length} bytes") 26 | 27 | img_data = myblob.read() 28 | try: 29 | api_url = "{0}vision/v1.0/analyze?{1}".format(api_endpoint, params) 30 | logging.info("API URL:{}".format(api_url)) 31 | 32 | r = requests.post(api_url, 33 | headers=headers, 34 | data=img_data) 35 | 36 | parsed = r.json() 37 | logging.info("Response:") 38 | logging.info(json.dumps(parsed, sort_keys=True, indent=2)) 39 | 40 | # Set output data 41 | outdata = {} 42 | outdata['name'] = myblob.name 43 | taglist = parsed['description']['tags'] 44 | outdata['text'] = ' '.join(taglist) 45 | logging.info(json.dumps(outdata, sort_keys=True, indent=2)) 46 | 47 | ## Store output data using Cosmos DB output binding 48 | doc.set(func.Document.from_json(json.dumps(outdata))) 49 | except Exception as e: 50 | print('Error:') 51 | print(e) 52 | -------------------------------------------------------------------------------- /v2functions/blob-trigger-cosmosdb-out-binding/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "scriptFile": "__init__.py", 3 | "bindings": [ 4 | { 5 | "name": "myblob", 6 | "type": "blobTrigger", 7 | "direction": "in", 8 | "path": "upload-images/{name}", 9 | "connection": "MyStorageConnectionString" 10 | }, 11 | { 12 | "direction": "out", 13 | "type": "cosmosDB", 14 | "name": "doc", 15 | "databaseName": "testdb", 16 | "collectionName": "testcol01", 17 | "leaseCollectionName": "leases", 18 | "createLeaseCollectionIfNotExists": true, 19 | "connectionStringSetting": "MyCosmosDBConnectionString", 20 | "createIfNotExists": true 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- 
/v2functions/blob-trigger-cosmosdb-out-binding/readme.md: -------------------------------------------------------------------------------- 1 | # blob-trigger-cosmosdb-out-binding (Python) 2 | 3 | | Sample | Description | Trigger | In Bindings | Out Bindings 4 | | ------------- | ------------- | ------------- | ----------- | ----------- | 5 | | `blob-trigger-cosmosdb-out-binding` | Azure Functions Blob Storage Trigger Python Sample. The function gets image data from Azure Blob Trigger, gets tags for the image with [Computer Vision API](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/) ([Azure Cognitive Services](https://azure.microsoft.com/en-us/services/cognitive-services/)), and stores the tags into Azure Cosmos DB by leveraging CosmosDB output binding | Blob Storage | NONE | CosmosDB | 6 | 7 | 8 | - [blob-trigger-cosmosdb-out-binding (Python)](#blob-trigger-cosmosdb-out-binding-python) 9 | - [Configurations](#configurations) 10 | - [Create Computer Vision resource](#create-computer-vision-resource) 11 | - [Create Blob Storage account & Container](#create-blob-storage-account--container) 12 | - [Create Cosmos DB Account and DB & Collection](#create-cosmos-db-account-and-db--collection) 13 | - [How to develop and publish the function](#how-to-develop-and-publish-the-function) 14 | - [Local development](#local-development) 15 | - [Publish the function to the cloud](#publish-the-function-to-the-cloud) 16 | - [Test Request](#test-request) 17 | - [HTTP Request body format](#http-request-body-format) 18 | - [Response body format](#response-body-format) 19 | 20 | ## Configurations 21 | As specified in `functions.json`, you need Azure Storage account for triggering functions and Cosmos DB Account to store data using Cosmos DB output binding 22 | 23 | ```json 24 | { 25 | "scriptFile": "__init__.py", 26 | "bindings": [ 27 | { 28 | "name": "myblob", 29 | "type": "blobTrigger", 30 | "direction": "in", 31 | "path": "upload-images/{name}", 32 | 
"connection": "MyStorageConnectionString" 33 | }, 34 | { 35 | "direction": "out", 36 | "type": "cosmosDB", 37 | "name": "doc", 38 | "databaseName": "testdb", 39 | "collectionName": "testcol01", 40 | "leaseCollectionName": "leases", 41 | "createLeaseCollectionIfNotExists": true, 42 | "connectionStringSetting": "MyCosmosDBConnectionString", 43 | "createIfNotExists": true 44 | } 45 | ] 46 | } 47 | ``` 48 | 49 | ### Create Computer Vision resource 50 | 51 | First, create a Computer Vision resource 52 | 53 | ```bash 54 | COGNITIVE_RESOURCE_GROUP="rg_cognitive_test" 55 | REGION="eastasia" 56 | COGNITIVE_ACCOUNT_NAME="mycompvision001" 57 | 58 | echo "Create Resource Group: $COGNITIVE_RESOURCE_GROUP" 59 | az group create --name $COGNITIVE_RESOURCE_GROUP --location $REGION 60 | 61 | echo "Create Cognitive Resource for Computer Vision: $COGNITIVE_ACCOUNT_NAME" 62 | az cognitiveservices account create \ 63 | -n $COGNITIVE_ACCOUNT_NAME \ 64 | -g $COGNITIVE_RESOURCE_GROUP \ 65 | --kind ComputerVision \ 66 | --sku S1 \ 67 | -l $REGION \ 68 | --yes 69 | ``` 70 | 71 | Then, Get Computer Vision API Key and endpoint. 
You'll use the values in later step: 72 | ```bash 73 | COMPUTER_VISION_API_ENDPOINT=$(az cognitiveservices account show -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 74 | COMPUTER_VISION_API_KEY=$(az cognitiveservices account keys list -n $COGNITIVE_ACCOUNT_NAME -g $COGNITIVE_RESOURCE_GROUP --output tsv |awk '{print $1}') 75 | echo "API Endpoint: $COMPUTER_VISION_API_ENDPOINT" 76 | echo "API KEY: $COMPUTER_VISION_API_KEY" 77 | ``` 78 | 79 | ### Create Blob Storage account & Container 80 | 81 | Create an Azure Storage Account 82 | ```bash 83 | RESOURCE_GROUP="rg-testfunctions" 84 | REGION="japaneast" 85 | STORAGE_ACCOUNT="teststore" 86 | az storage account create --name $STORAGE_ACCOUNT \ 87 | --location $REGION \ 88 | --resource-group $RESOURCE_GROUP \ 89 | --sku Standard_LRS 90 | ``` 91 | 92 | Create a container in the storage you've created 93 | ```sh 94 | # Get Storage Key 95 | ACCESS_KEY=$(az storage account keys list --account-name $STORAGE_ACCOUNT --resource-group $RESOURCE_GROUP --output tsv |head -1 | awk '{print $3}') 96 | 97 | az storage container create \ 98 | --name "upload-images" \ 99 | --account-name $STORAGE_ACCOUNT \ 100 | --account-key $ACCESS_KEY 101 | ``` 102 | 103 | ### Create Cosmos DB Account and DB & Collection 104 | 105 | Create a Cosmos DB Account 106 | ```sh 107 | COSMOSDB_ACCOUNT_NAME="azfuncv2db" 108 | RESOURCE_GROUP="RG-azfuncv2" 109 | az cosmosdb create \ 110 | --name $COSMOSDB_ACCOUNT_NAME \ 111 | --kind GlobalDocumentDB \ 112 | --resource-group $RESOURCE_GROUP 113 | ``` 114 | 115 | Create Database and Collection in the Cosmos DB that you've created 116 | 117 | ```sh 118 | # Get Key 119 | COSMOSDB_KEY=$(az cosmosdb list-keys --name $COSMOSDB_ACCOUNT_NAME --resource-group $RESOURCE_GROUP --output tsv |awk '{print $1}') 120 | 121 | # Create Database 122 | DATABASE_NAME="testdb" 123 | az cosmosdb database create \ 124 | --name $COSMOSDB_ACCOUNT_NAME \ 125 | --db-name $DATABASE_NAME \ 126 | --key 
$COSMOSDB_KEY \ 127 | --resource-group $RESOURCE_GROUP 128 | 129 | # Create a container with a partition key and provision 400 RU/s throughput. 130 | COLLECTION_NAME="testcol01" 131 | az cosmosdb collection create \ 132 | --resource-group $RESOURCE_GROUP \ 133 | --collection-name $COLLECTION_NAME \ 134 | --name $COSMOSDB_ACCOUNT_NAME \ 135 | --db-name $DATABASE_NAME \ 136 | --partition-key-path /name \ 137 | --throughput 400 138 | 139 | # Create a container for leaves 140 | # 'leaves' need to be a single collection partition 141 | # Please see also: https://github.com/Azure/azure-functions-core-tools/issues/930 142 | LEASES_COLLECTION_NAME="leases" 143 | az cosmosdb collection create \ 144 | --resource-group $RESOURCE_GROUP \ 145 | --collection-name $LEASES_COLLECTION_NAME \ 146 | --name $COSMOSDB_ACCOUNT_NAME \ 147 | --db-name $DATABASE_NAME \ 148 | --throughput 400 149 | ``` 150 | 151 | ## How to develop and publish the function 152 | ### Local development 153 | ```sh 154 | func host start 155 | ``` 156 | 157 | ### Publish the function to the cloud 158 | 159 | Publish the function to the cloud 160 | ```sh 161 | FUNCTION_APP_NAME="MyFunctionApp" 162 | func azure functionapp publish $FUNCTION_APP_NAME --build-native-deps --no-bundler 163 | ``` 164 | 165 | Add Functions App Settings 166 | ```sh 167 | FUNCTION_STORAGE_CONNECTION="*************" 168 | COSMOS_DB_CONNECTION="***************" 169 | az webapp config appsettings set \ 170 | -n $FUNCTION_APP_NAME \ 171 | -g $RESOURCE_GROUP \ 172 | --settings \ 173 | ComputerVisionSubscription=$COMPUTER_VISION_API_KEY \ 174 | ComputerVisionApiEndpoint=$COMPUTER_VISION_API_ENDPOINT \ 175 | MyStorageConnectionString=$FUNCTION_STORAGE_CONNECTION \ 176 | MyCosmosDBConnectionString=$COSMOS_DB_CONNECTION 177 | ``` 178 | 179 | ## Test Request 180 | 181 | ### HTTP Request body format 182 | HTTP Request body must include the following parameters: 183 | ``` 184 | { 185 | 'permission': '', 186 | 'container': '', 187 | 'blobname': '' 
188 | 'ttl': '' 189 | } 190 | ``` 191 | 192 | The following values can be used for permissions: 193 | `a` (Add), `r` (Read), `w` (Write), `d` (Delete), `l` (List) 194 | Concatenate multiple permissions, such as `rwa` = Read, Write, Add 195 | 196 | Sample Request Body 197 | ``` 198 | { 199 | 'permission': "rl", 200 | 'container': "functiontest", 201 | 'blobname': "sample.png" 202 | 'ttl': 2 203 | } 204 | ``` 205 | 206 | ## Response body format 207 | HTTP response body format is: 208 | ``` 209 | { 210 | 'token': '', 211 | 'url' : '' 212 | } 213 | ``` 214 | 215 | Sample Response Body 216 | ``` 217 | {'url': 'https://testfunction.blob.core.windows.net/functiontest/yokawasa.png?sig=sXBjML1Fpk9UnTBtajo05ZTFSk0LWFGvARZ6WlVcAog%3D&srt=o&ss=b&spr=https&sp=rl&sv=2016-05-31&se=2017-07-01T00%3A21%3A38Z&st=2017-07-01T23%3A16%3A38Z', 'token': 'sig=sXBjML1Fpk9UnTBtajo05ZTFSk0LWFGvARZ6WlVcAog%3D&srt=o&ss=b&spr=https&sp=rl&sv=2016-05-31&se=2017-07-01T00%3A21%3A38Z&st=2017-07-01T23%3A16%3A38Z'} 218 | ``` -------------------------------------------------------------------------------- /v2functions/blob-trigger-watermark-blob-out-binding/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import azure.functions as func 3 | import io 4 | import sys 5 | from PIL import Image 6 | 7 | # Final image composite size 8 | FINAL_COMPOSITE_MAX_HEIGHT = 700 9 | FINAL_COMPOSITE_MAX_WIDTH = 700 10 | 11 | # Set watermark 7 times smaller in width than base image 12 | WATERMARK_WIDTH_RATIO = 7 13 | 14 | # Note the type annotation for our output blob, func.Out[bytes], 15 | # since it's an image we'll be writing back. 
16 | # See this for more - 17 | # https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python#outputs 18 | # https://docs.microsoft.com/en-us/python/api/azure-functions/azure.functions.out?view=azure-python 19 | def main(blobin: func.InputStream, blobout: func.Out[bytes], context: func.Context): 20 | logging.info(f"--- Python blob trigger function processed blob \n" 21 | f"----- Name: {blobin.name}\n" 22 | f"----- Blob Size: {blobin.length} bytes") 23 | 24 | # Pillow calls blobin.read() so only 25 | # pass in the image object 26 | input_image = blobin 27 | watermark_image = f'{context.function_directory}/watermark.png' 28 | 29 | try: 30 | base_image = Image.open(input_image) 31 | watermark = Image.open(watermark_image) 32 | except OSError as e: 33 | print(f'EXCEPTION: Unable to read input as image. {e}') 34 | sys.exit(254) 35 | except Exception as e: 36 | print(f'EXCEPTION: {e}') 37 | sys.exit(255) 38 | 39 | # Resize base image if too large 40 | if base_image.width > FINAL_COMPOSITE_MAX_WIDTH or base_image.height > FINAL_COMPOSITE_MAX_HEIGHT: 41 | if base_image.height > base_image.width: 42 | factor = 900 / base_image.height 43 | else: 44 | factor = 900 / base_image.width 45 | base_image = base_image.resize((int(base_image.width * factor), int(base_image.height * factor))) 46 | 47 | # Set watermark size 48 | relative_ws = round(base_image.width/WATERMARK_WIDTH_RATIO) 49 | watermark_size = (relative_ws, relative_ws) 50 | watermark.thumbnail(watermark_size, Image.ANTIALIAS) 51 | 52 | # Watermark anchor (left, top) 53 | position = (16, 16) 54 | 55 | img = Image.new('RGBA', (base_image.width, base_image.height), (0, 0, 0, 0)) 56 | img.paste(base_image, (0, 0)) 57 | # Watermark may not have an alpha channel, 58 | # therefore no mask to apply 59 | try: 60 | img.paste(watermark, position, mask=watermark) 61 | except ValueError: 62 | img.paste(watermark, position) 63 | # Render image on screen (save to a temp file and calls 64 | # xv on Linux and 
Preview.app on Mac) 65 | # We could improve this by drawing straight to a OpenCV 66 | # canvas.. maybe. 67 | img.show() 68 | 69 | # Store final composite in a memory stream 70 | img_byte_arr = io.BytesIO() 71 | # Convert composite to RGB so we can save as JPEG 72 | img.convert('RGB').save(img_byte_arr, format='JPEG') 73 | 74 | # Optionally, save final composite to disk 75 | # output_image = 'output.jpg' 76 | # img.save(output_image) 77 | 78 | # Write to output blob 79 | # 80 | # Use this to set blob content from a file instead: 81 | # with open(output_image, mode='rb') as file: 82 | # blobout.set(file.read()) 83 | # 84 | # Set blob content from byte array in memory 85 | blobout.set(img_byte_arr.getvalue()) 86 | 87 | logging.info(f"----- Watermarking successful") 88 | -------------------------------------------------------------------------------- /v2functions/blob-trigger-watermark-blob-out-binding/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "scriptFile": "__init__.py", 3 | "bindings": [ 4 | { 5 | "name": "blobin", 6 | "type": "blobTrigger", 7 | "direction": "in", 8 | "path": "input/{blobname}.{blobextension}", 9 | "connection": "MyStorageConnectionString" 10 | }, 11 | { 12 | "name": "blobout", 13 | "type": "blob", 14 | "direction": "out", 15 | "path": "output/{blobname}_watermarked.jpg", 16 | "connection": "MyStorageConnectionString" 17 | } 18 | ] 19 | } -------------------------------------------------------------------------------- /v2functions/blob-trigger-watermark-blob-out-binding/readme.md: -------------------------------------------------------------------------------- 1 | # blob-trigger-watermark-blob-out-binding (Python) 2 | 3 | | Sample | Description | Trigger | In Bindings | Out Bindings 4 | | ------------- | ------------- | ------------- | ----------- | ----------- | 5 | | `blob-trigger-watermark-blob-out-binding` | Azure Function Python Sample that watermarks an image. 
This function triggers on an input blob (image) and adds a watermark by calling into the Pillow library. The resulting composite image is then written back to blob storage using a blob output binding. | Blob Storage | Blob Storage | Blob Storage | 6 | 7 | ## Sample output 8 | ![](sample.jpg) 9 | 10 | ## Configurations 11 | As specified in `functions.json`, you need Azure Storage account for triggering functions, input & output binding. 12 | 13 | ```json 14 | { 15 | "scriptFile": "__init__.py", 16 | "bindings": [ 17 | { 18 | "name": "blobin", 19 | "type": "blobTrigger", 20 | "direction": "in", 21 | "path": "input/{blobname}.{blobextension}", 22 | "connection": "MyStorageConnectionString" 23 | }, 24 | { 25 | "name": "blobout", 26 | "type": "blob", 27 | "direction": "out", 28 | "path": "output/{blobname}_watermarked.jpg", 29 | "connection": "MyStorageConnectionString" 30 | } 31 | ] 32 | } 33 | ``` 34 | 35 | ### Create Azure Storage Account 36 | 37 | Create an Azure Storage Account 38 | ```sh 39 | RESOURCE_GROUP="rg-testfunctions" 40 | REGION="japaneast" 41 | STORAGE_ACCOUNT="teststore" 42 | az storage account create --name $STORAGE_ACCOUNT \ 43 | --location $REGION \ 44 | --resource-group $RESOURCE_GROUP \ 45 | --sku Standard_LRS 46 | ``` 47 | 48 | ### Create Blob Storage Containers 49 | 50 | Create 2 blob containers in the storage you've created: `input` and `output` 51 | ```sh 52 | # Get Storage Key 53 | ACCESS_KEY=$(az storage account keys list --account-name $STORAGE_ACCOUNT --resource-group $RESOURCE_GROUP --output tsv |head -1 | awk '{print $3}') 54 | 55 | az storage container create \ 56 | --name "input" \ 57 | --account-name $STORAGE_ACCOUNT \ 58 | --account-key $ACCESS_KEY 59 | 60 | az storage container create \ 61 | --name "output" \ 62 | --account-name $STORAGE_ACCOUNT \ 63 | --account-key $ACCESS_KEY 64 | ``` 65 | 66 | ## How to develop and publish the functions 67 | 68 | ### Local development 69 | 70 | ```sh 71 | func host start 72 | ``` 73 | 74 | ### Try 
Upload an image to `input` blob container under the storage account to try it out. The final watermarked composite should surface in the `output` container. When developing locally it should also render it on screen if you have [imagemagick][1] or [xv][2] installed (works on Mac, Linux, WSL with Xming X server, unsure what happens on native Windows).

[1]: https://packages.ubuntu.com/cosmic/imagemagick
[2]: https://github.com/haegar/xv
def _rot13(c):
    """Rotate a single ASCII letter by 13 places; any other character is
    returned unchanged."""
    # Chained comparisons are the idiomatic form of `'A' <= c and c <= 'Z'`.
    if 'A' <= c <= 'Z':
        return chr((ord(c) - ord('A') + 13) % 26 + ord('A'))
    if 'a' <= c <= 'z':
        return chr((ord(c) - ord('a') + 13) % 26 + ord('a'))
    return c

def process_rot13(s):
    """Apply ROT13 to every character of ``s`` and return the encoded string.

    ROT13 is an involution: applying it twice yields the original text.
    """
    return ''.join(_rot13(c) for c in s)

def main(docs: func.DocumentList, outdoc: func.Out[func.Document]) -> None:
    """Cosmos DB trigger: for each changed document, ROT13-encode its
    ``text`` field and write a new ``{name, text}`` document through the
    Cosmos DB output binding.

    Fixed the return annotation: this function returns nothing (results go
    out via ``outdoc``), and function.json declares no ``$return`` binding,
    so ``-> str`` was incorrect.
    """
    newdocs = func.DocumentList()
    for doc in docs:
        logging.info(doc.to_json())

        ## Process Something
        clear_text = doc["text"]
        encrypted_text = process_rot13(clear_text)

        ## Create a new doc (type:Dict)
        newdoc_dict = {
            "name": doc["name"],
            "text": encrypted_text
        }

        ## Append the new doc to DocumentList for output
        newdocs.append(func.Document.from_dict(newdoc_dict))

    ## Set the DocumentList to outdoc to store into CosmosDB using CosmosDB output binding
    outdoc.set(newdocs)
"createIfNotExists": true 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /v2functions/cosmos-trigger-cosmodb-output-binding/readme.md: -------------------------------------------------------------------------------- 1 | # cosmos-trigger-cosmodb-output-binding (Python) 2 | 3 | | Sample | Description | Trigger | In Bindings | Out Bindings 4 | | ------------- | ------------- | ------------- | ----------- | ----------- | 5 | | `cosmos-trigger-cosmodb-output-binding` | Azure Functions Cosmos DB Trigger Python Sample. The function gets document data from Azure Cosmos DB Trigger, ROT13 encode obtained clear text, and store encoded data into Azure Cosmos DB by using Cosmos DB output binding | CosmosDB | NONE | CosmosDB | 6 | 7 | ## Configurations 8 | As specified in `functions.json`, you need Azure Cosmos DB account for triggering functions and storing data using Cosmos DB output binding 9 | 10 | ```json 11 | { 12 | "scriptFile": "__init__.py", 13 | "bindings": [ 14 | { 15 | "type": "cosmosDBTrigger", 16 | "name": "docs", 17 | "direction": "in", 18 | "leaseCollectionName": "leases", 19 | "connectionStringSetting": "MyCosmosDBConnectionString", 20 | "databaseName": "testdb", 21 | "collectionName": "testcol01", 22 | "createLeaseCollectionIfNotExists": true 23 | }, 24 | { 25 | "direction": "out", 26 | "type": "cosmosDB", 27 | "name": "outdoc", 28 | "databaseName": "testdb", 29 | "collectionName": "testcol02", 30 | "leaseCollectionName": "leases", 31 | "createLeaseCollectionIfNotExists": true, 32 | "connectionStringSetting": "MyCosmosDBConnectionString", 33 | "createIfNotExists": true 34 | } 35 | ] 36 | } 37 | 38 | ``` 39 | ### Create Cosmos DB Account and DB & Collection for testing 40 | 41 | Create a Cosmos DB Account 42 | ```sh 43 | COSMOSDB_ACCOUNT_NAME="azfuncv2db" 44 | RESOURCE_GROUP="RG-azfuncv2" 45 | az cosmosdb create \ 46 | --name $COSMOSDB_ACCOUNT_NAME \ 47 | --kind GlobalDocumentDB \ 48 | --resource-group 
Create Database and Collection in the Cosmos DB that you've created

```sh
# Get Key
COSMOSDB_KEY=$(az cosmosdb list-keys --name $COSMOSDB_ACCOUNT_NAME --resource-group $RESOURCE_GROUP --output tsv |awk '{print $1}')

# Create Database
DATABASE_NAME="testdb"
az cosmosdb database create \
    --name $COSMOSDB_ACCOUNT_NAME \
    --db-name $DATABASE_NAME \
    --key $COSMOSDB_KEY \
    --resource-group $RESOURCE_GROUP

# Create a container with a partition key and provision 400 RU/s throughput.
COLLECTION_NAME="testcol01"
az cosmosdb collection create \
    --resource-group $RESOURCE_GROUP \
    --collection-name $COLLECTION_NAME \
    --name $COSMOSDB_ACCOUNT_NAME \
    --db-name $DATABASE_NAME \
    --partition-key-path /name \
    --throughput 400

COLLECTION_NAME="testcol02"
az cosmosdb collection create \
    --resource-group $RESOURCE_GROUP \
    --collection-name $COLLECTION_NAME \
    --name $COSMOSDB_ACCOUNT_NAME \
    --db-name $DATABASE_NAME \
    --partition-key-path /name \
    --throughput 400

# Create a container for leases
# 'leases' needs to be a single collection partition
# Please see also: https://github.com/Azure/azure-functions-core-tools/issues/930
LEASES_COLLECTION_NAME="leases"
az cosmosdb collection create \
    --resource-group $RESOURCE_GROUP \
    --collection-name $LEASES_COLLECTION_NAME \
    --name $COSMOSDB_ACCOUNT_NAME \
    --db-name $DATABASE_NAME \
    --throughput 400
```
COSMOS_DB_CONNECTION="***************" 112 | az webapp config appsettings set \ 113 | -n $FUNCTION_APP_NAME \ 114 | -g $RESOURCE_GROUP \ 115 | --settings \ 116 | MyCosmosDBConnectionString=$COSMOS_DB_CONNECTION 117 | ``` 118 | -------------------------------------------------------------------------------- /v2functions/host.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "extensionBundle": { 4 | "id": "Microsoft.Azure.Functions.ExtensionBundle", 5 | "version": "[1.*, 2.0.0)" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /v2functions/http-trigger-blob-sas-token/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | An HTTP trigger Azure Function that returns a SAS token for Azure Storage for the specified container and blob name. 3 | You can also specify access permissions for the container/blob name and optionally its token time-to-live period. 4 | The SAS token expires in an hour by default. 
# HTTP method accepted by this function; anything else gets a 405 from main().
_ALLOWED_HTTP_METHOD = "POST"
# Storage service version used for the 'sv' field of the SAS token.
_AZURE_STORAGE_API_VERSION = "2018-03-28"
# App setting that holds the storage connection string.
_AZURE_STORAGE_CONN_STRING_ENV_NAME = "MyStorageConnectionString"
# Default token time-to-live, in hours.
_SAS_TOKEN_DEFAULT_TTL = 1

# Use .get() with a default so a missing app setting does not raise KeyError
# at import time and kill the whole worker; main() already answers HTTP 400
# when no account name / key can be parsed out of an empty string.
connString = os.environ.get(_AZURE_STORAGE_CONN_STRING_ENV_NAME, '')

def write_http_response(status, body_dict):
    """Serialize an HTTP response envelope as a JSON string.

    :param status:    HTTP status code to report.
    :param body_dict: dict serialized into the JSON 'body' field.
    :returns: JSON string with 'status', 'body' and JSON Content-Type header.
    """
    return_dict = {
        "status": status,
        "body": json.dumps(body_dict),
        "headers": {
            "Content-Type": "application/json"
        }
    }
    return json.dumps(return_dict)

def generate_sas_token(storage_account, storage_key, permission, token_ttl,
                       container_name, blob_name=None):
    """Build an account SAS token plus a ready-to-use blob/container URL.

    :param storage_account: storage account name.
    :param storage_key:     base64-encoded account key.
    :param permission:      signed permissions string, e.g. "rl".
    :param token_ttl:       expiry, in hours from now.
    :param container_name:  target container.
    :param blob_name:       optional blob; when given the URL addresses the
                            blob and srt is 'o' (object), otherwise 'co'.
    :returns: dict with 'token' (query string) and 'url'.
    """
    sp = permission
    # Set start time to five minutes ago to avoid clock skew.
    st = str((datetime.utcnow() - timedelta(minutes=5)).strftime("%Y-%m-%dT%H:%M:%SZ"))
    se = str((datetime.utcnow() + timedelta(hours=token_ttl)).strftime("%Y-%m-%dT%H:%M:%SZ"))
    srt = 'o' if blob_name else 'co'

    # String-to-sign for an account SAS; field order is fixed by the service.
    inputvalue = "{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}\n".format(
        storage_account,             # 0. account name
        sp,                          # 1. signed permission (sp)
        'b',                         # 2. signed service (ss) - blob
        srt,                         # 3. signed resource type (srt)
        st,                          # 4. signed start time (st)
        se,                          # 5. signed expire time (se)
        '',                          # 6. signed ip (unrestricted)
        'https',                     # 7. signed protocol
        _AZURE_STORAGE_API_VERSION)  # 8. signed version

    # HMAC-SHA256 over the string-to-sign, keyed with the decoded account key.
    # (Renamed from 'hash', which shadowed the builtin.)
    signature = hmac.new(
        base64.b64decode(storage_key),
        inputvalue.encode(encoding='utf-8'),
        hashlib.sha256
    ).digest()

    sig = base64.b64encode(signature)

    querystring = {
        'sv': _AZURE_STORAGE_API_VERSION,
        'ss': 'b',
        'srt': srt,
        'sp': sp,
        'se': se,
        'st': st,
        'spr': 'https',
        'sig': sig,
    }
    sastoken = urllib.parse.urlencode(querystring)

    if blob_name:
        sas_url = "https://{0}.blob.core.windows.net/{1}/{2}?{3}".format(
            storage_account,
            container_name,
            blob_name,
            sastoken)
    else:
        sas_url = "https://{0}.blob.core.windows.net/{1}?{2}".format(
            storage_account,
            container_name,
            sastoken)

    return {
        'token': sastoken,
        'url': sas_url
    }
def main(req: func.HttpRequest) -> str:
    """HTTP entry point: validate the request and return a SAS token response.

    Expects a POST with JSON body {'permission', 'container', optional
    'blobname', optional 'ttl' (hours)}. Responds with a JSON envelope from
    write_http_response(): 400 on config/body errors, 405 on wrong method,
    200 with {'token', 'url'} on success.
    """
    logging.info('Python HTTP trigger function processed a request.')

    # Get Azure Storage Connection String and pull out account name/key
    storage_account = None
    storage_key = None

    ll = connString.split(';')
    for l in ll:
        ss = l.split('=', 1)
        if len(ss) != 2:
            continue
        if ss[0] == 'AccountName':
            storage_account = ss[1]
        if ss[0] == 'AccountKey':
            storage_key = ss[1]
    if not storage_account or not storage_key:
        return write_http_response(
            400,
            { 'message': 'Function configuration error: NO Azure Storage connection string found!' }
        )

    # Check HTTP Method
    if req.method.lower() != _ALLOWED_HTTP_METHOD.lower():
        return write_http_response(
            405,
            { 'message': 'Only POST HTTP Method is allowed' }
        )

    # Parse and validate the JSON body.
    # (Removed the unreachable `'req_body' in locals()` check that used to
    # live in a try/else here — inside the else branch req_body is always
    # bound.)
    try:
        req_body = req.get_json()
    except ValueError:
        # Case: empty or non-JSON body
        return write_http_response(
            400,
            { 'message': 'Invalid HTTP request body' }
        )

    # Case: Invalid parameters
    if not req_body.get('permission') or not req_body.get('container'):
        return write_http_response(
            400,
            { 'message': 'Permission and container parameters must be included in HTTP request body' }
        )

    permission = req_body.get('permission')
    container_name = req_body.get('container')
    blob_name = req_body.get('blobname')
    token_ttl = _SAS_TOKEN_DEFAULT_TTL
    if req_body.get('ttl'):
        # int() on a non-numeric ttl used to raise an uncaught ValueError
        # (HTTP 500); fold it into the existing 400 path instead.
        try:
            token_ttl = int(req_body.get('ttl'))
        except (TypeError, ValueError):
            token_ttl = 0
        if token_ttl < 1:
            return write_http_response(
                400,
                { 'message': 'Token ttl must be digit and more than 0' }
            )

    # Generate SAS Token
    token_dict = generate_sas_token(
        storage_account,
        storage_key,
        permission,
        token_ttl,
        container_name,
        blob_name
    )
    logging.info("Generated Token token=>{} url=>{}".format(token_dict['token'], token_dict['url']))

    # Write HTTP Response
    return write_http_response(200, token_dict)
the function to generate SAS token for specified container and blob name. 22 | 23 | Create an Azure Storage Account 24 | ```sh 25 | RESOURCE_GROUP="rg-testfunctions" 26 | REGION="japaneast" 27 | STORAGE_ACCOUNT="teststore" 28 | az storage account create --name $STORAGE_ACCOUNT \ 29 | --location $REGION \ 30 | --resource-group $RESOURCE_GROUP \ 31 | --sku Standard_LRS 32 | ``` 33 | 34 | Create a container in the storage you've created 35 | ```sh 36 | # Get Storage Key 37 | ACCESS_KEY=$(az storage account keys list --account-name $STORAGE_ACCOUNT --resource-group $RESOURCE_GROUP --output tsv |head -1 | awk '{print $3}') 38 | 39 | az storage container create \ 40 | --name "functiontest" \ 41 | --account-name $STORAGE_ACCOUNT \ 42 | --account-key $ACCESS_KEY 43 | ``` 44 | 45 | ## How to develop and publish the function 46 | ### Local development 47 | ```sh 48 | func host start 49 | ``` 50 | 51 | ### Publish the function to the cloud 52 | 53 | Publish the function to the cloud 54 | ```sh 55 | FUNCTION_APP_NAME="MyFunctionApp" 56 | func azure functionapp publish $FUNCTION_APP_NAME --build-native-deps --no-bundler 57 | ``` 58 | 59 | Add Functions App Settings 60 | ```sh 61 | FUNCTION_STORAGE_CONNECTION="*************" 62 | az webapp config appsettings set \ 63 | -n $FUNCTION_APP_NAME \ 64 | -g $RESOURCE_GROUP \ 65 | --settings \ 66 | MyStorageConnectionString=$FUNCTION_STORAGE_CONNECTION 67 | ``` 68 | 69 | ## API Format 70 | 71 | ### HTTP Request body format 72 | HTTP Request body must include the following parameters: 73 | ``` 74 | { 75 | 'permission': '', 76 | 'container': '', 77 | 'blobname': '' 78 | 'ttl': '' 79 | } 80 | ``` 81 | 82 | The following values can be used for permissions: 83 | `a` (Add), `r` (Read), `w` (Write), `d` (Delete), `l` (List) 84 | Concatenate multiple permissions, such as `rwa` = Read, Write, Add 85 | 86 | Sample Request Body 87 | ``` 88 | { 89 | 'permission': "rl", 90 | 'container': "functiontest", 91 | 'blobname': "sample.png" 92 | 'ttl': 2 93 
| } 94 | ``` 95 | 96 | ### Response body format 97 | HTTP response body format is: 98 | ``` 99 | { 100 | 'token': '', 101 | 'url' : '' 102 | } 103 | ``` 104 | 105 | Sample Response Body 106 | ``` 107 | {"token": "sv=2018-03-28&ss=b&srt=o&sp=rl&se=2019-03-29T14%3A02%3A37Z&st=2019-03-29T11%3A57%3A37Z&spr=https&sig=Sh7RAa5MZBk7gfv0haCbEbllFXoiOWJDK9itzPeqURE%3D", "url": "https://MyFunctionApp.blob.core.windows.net/functiontest/sample.jpg?sv=2018-03-28&ss=b&srt=o&sp=rl&se=2019-03-29T14%3A02%3A37Z&st=2019-03-29T11%3A57%3A37Z&spr=https&sig=Sh7RAa5MZBk7gfv0haCbEbllFXoiOWJDK9itzPeqURE%3D" } 108 | ``` 109 | 110 | ## Examples 111 | ### Get SAS Token to access blob files in Azure Blob Storage 112 | 113 | There is a test request command - `scripts/send-test-blob-sas-token.sh` 114 | ```sh 115 | api_url="AZURE_FUNCTION_ENDPOINT: ex. https://.azurewebsites.net/api/" 116 | api_key="AZURE_FUNCTION_KEY: ex. aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 117 | 118 | echo "Sending HTTP POST Request............." 119 | curl -s\ 120 | -H "Content-Type: application/json; charset=UTF-8"\ 121 | -H "x-functions-key: ${api_key}"\ 122 | -XPOST ${api_url} -d'{ 123 | "permission": "rl", 124 | "container": "functiontest", 125 | "blobname": "sample.png", 126 | "ttl": 1 127 | }' 128 | ``` 129 | 130 | Replace `api_url` and `api_key` with your values in the script and execute it. 
You'll get response back like this: 131 | 132 | ```json 133 | { 134 | "token": "sv=2018-03-28&ss=b&srt=o&sp=rl&se=2019-03-31T05%3A17%3A11Z&st=2019-03-31T03%3A12%3A11Z&spr=https&sig=A99ZFhDK2fwHnYl5Nd1dm%2Bcd1xJbolHz5wZLG9ewOvs%3D", 135 | "url": "https://MyFunctionApp.blob.core.windows.net/functiontest/sample.jpg?sv=2018-03-28&ss=b&srt=o&sp=rl&se=2019-03-31T05%3A17%3A11Z&st=2019-03-31T03%3A12%3A11Z&spr=https&sig=A99ZFhDK2fwHnYl5Nd1dm%2Bcd1xJbolHz5wZLG9ewOvs%3D" 136 | } 137 | ``` 138 | 139 | You can access to the blob with `url` that is included in the response body 140 | 141 | ```sh 142 | $ open https://MyFunctionApp.blob.core.windows.net/functiontest/sample.jpg?sv=2018-03-28&ss=b&srt=o&sp=rl&se=2019-03-31T05%3A17%3A11Z&st=2019-03-31T03%3A12%3A11Z&spr=https&sig=A99ZFhDK2fwHnYl5Nd1dm%2Bcd1xJbolHz5wZLG9ewOvs%3D 143 | ``` 144 | 145 | ### Uploading files to Azure Blob Storage 146 | 147 | Here is an example of uploading files to Azure Blob Storage using the http-trigger-blob-sas-token function. 148 | 149 | > [scripts/upload-blob-sas-token.py](../../scripts/upload-blob-sas-token.py) 150 | 151 | ```python 152 | import sys 153 | import os 154 | import ntpath 155 | import json 156 | import requests 157 | 158 | _AZFUNC_API_KEY="AZURE_FUNCTION_KEY: ex. aRVQ7Lj0vzDhY0JBYF8gpxYyEBxLwhO51JSC7X5dZFbTvROs7xNg==" 159 | _AZFUNC_API_URL="AZURE_FUNCTION_ENDPOINT: ex. 
https://.azurewebsites.net/api/" 160 | 161 | if __name__ == '__main__': 162 | 163 | file_path = "/tmp/test.jpg" 164 | content_type = "image/jpeg" 165 | container_name = "functiontest" 166 | 167 | file_name = ntpath.basename(file_path) 168 | 169 | ### Getting SAS token for uploading files to Azure Blob Storage 170 | payload = { 171 | "permission": "awl", 172 | "container": container_name, 173 | "blobname": file_name 174 | } 175 | r = requests.post(_AZFUNC_API_URL, 176 | headers = { 177 | "Content-Type" : "application/json; charset=UTF-8", 178 | "x-functions-key": _AZFUNC_API_KEY 179 | }, 180 | data=json.dumps(payload) 181 | ) 182 | if r.status_code != 200: 183 | print(f"Getting SAS token request result: status code={r.status_code}") 184 | sys.exit(1) 185 | 186 | content_dict = json.loads(r.content.decode()) 187 | url = content_dict['url'] 188 | 189 | ### Uploading files to Azure Blob Storage 190 | with open(file_path , 'rb') as filehandle: 191 | r = requests.put(url, 192 | data=filehandle, 193 | headers={ 194 | 'Content-Type': content_type, 195 | 'x-ms-blob-type': 'BlockBlob' 196 | }, 197 | params={ 198 | 'file': file_path 199 | } 200 | ) 201 | print(f"Uploading request result: status code={r.status_code}") 202 | ``` 203 | 204 | In the example above, you will upload `/tmp/test.jpg` to a container named `functiontest` in your Azure Blob Storage. 
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Echo the incoming HTTP request back to the caller as JSON.

    The response body contains the request's method, URL, headers, query
    parameters and raw body (decoded as text).
    """
    logging.info('Python HTTP trigger function processed a request.')
    request_dump = {
        'method': req.method,
        'url': req.url,
        'headers': dict(req.headers),
        'params': dict(req.params),
        'get_body': req.get_body().decode()
    }
    return func.HttpResponse(json.dumps(request_dump))
def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    """HTTP endpoint: run the 'rain princess' fast-neural-style ONNX model
    over the posted image and return the stylized image as JPEG bytes.

    The raw request body is expected to be the image bytes themselves;
    returns HTTP 400 when Pillow cannot decode them as an image.
    """
    logging.info('Python HTTP trigger function processed a request.')

    body = req.get_body()

    try:
        image = Image.open(io.BytesIO(body))
    except IOError:
        return func.HttpResponse(
            "Bad input. Unable to cast request body to an image format.",
            status_code=400
        )

    result = run_inference(image, context)

    # result is raw JPEG bytes produced by run_inference().
    return func.HttpResponse(result)

def run_inference(image, context):
    """Style-transfer `image` through the bundled rain_princess.onnx model.

    :param image:   a Pillow Image (any mode; converted to RGB below).
    :param context: used to locate the .onnx file in the function directory.
    :returns: JPEG-encoded bytes of the stylized image, upscaled to 800px
              wide with the original aspect ratio restored.
    """
    # See https://github.com/onnx/models/tree/master/vision/style_transfer/fast_neural_style
    # for implementation details
    # NOTE(review): a new InferenceSession is built on every invocation;
    # caching it at module scope would likely cut per-request latency —
    # confirm before changing.
    model_path = f'{context.function_directory}/rain_princess.onnx'
    session = onnxruntime.InferenceSession(model_path)
    metadata = session.get_modelmeta()
    logging.info(f'Model metadata:\n' +
                 f'  Graph name: {metadata.graph_name}\n' +
                 f'  Model version: {metadata.version}\n' +
                 f'  Producer: {metadata.producer_name}')

    # Preprocess image
    # Remember the input size so the output can be restored to its aspect ratio.
    original_image_size = image.size[0], image.size[1]
    logging.info('Preprocessing image...')
    # Model expects a 224x224 shape input
    image = image.resize((224, 224), Image.LANCZOS)
    bands = image.getbands()
    if bands == ('R', 'G', 'B'):
        logging.info(f'Image is RGB. No conversion necessary.')
    else:
        logging.info(f'Image is {bands}, converting to RGB...')
        image = image.convert('RGB')

    # (224, 224, 3) HWC uint8 -> (1, 3, 224, 224) NCHW float32, as the
    # model expects a batched channels-first tensor.
    x = np.array(image).astype('float32')
    x = np.transpose(x, [2, 0, 1])
    x = np.expand_dims(x, axis=0)

    output_name = session.get_outputs()[0].name
    input_name = session.get_inputs()[0].name
    logging.info('Running inference on ONNX model...')
    # First output, first (and only) batch element.
    result = session.run([output_name], {input_name: x})[0][0]

    # Postprocess image: clamp to valid pixel range, then (C, H, W) -> (H, W, C)
    result = np.clip(result, 0, 255)
    result = result.transpose(1,2,0).astype("uint8")
    img = Image.fromarray(result)
    max_width = 800
    height = int(max_width * original_image_size[1] / original_image_size[0])
    # Upsample and correct aspect ratio for final image
    img = img.resize((max_width, height), Image.BICUBIC)

    # Store inferred image as in memory byte array
    img_byte_arr = io.BytesIO()
    # Convert composite to RGB so we can return JPEG
    img.convert('RGB').save(img_byte_arr, format='JPEG')
    final_image = img_byte_arr.getvalue()

    return final_image
"$return" 18 | } 19 | ] 20 | } -------------------------------------------------------------------------------- /v2functions/http-trigger-onnx-model/rain_princess.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yokawasa/azure-functions-python-samples/34fa7302c8c4ce6c6a709afe2ed5cb355f057510/v2functions/http-trigger-onnx-model/rain_princess.onnx -------------------------------------------------------------------------------- /v2functions/http-trigger-onnx-model/readme.md: -------------------------------------------------------------------------------- 1 | # http-trigger-onnx-model (Python) 2 | 3 | | Sample | Description | Trigger | In Bindings | Out Bindings 4 | | ------------- | ------------- | ------------- | ----------- | ----------- | 5 | | `http-trigger-onnx-model` | This function demonstrates running an inference using an ONNX model. It is triggered by an HTTP request. See _[Try it out](#try-it-out)_ for usage. | HTTP | NONE | HTTP | 6 | 7 | The style transfer model used in this function is called _Rain Princess_. It is downloaded from the [ONNX Model Zoo][3]. 8 | 9 | Artistic style transfer models mix the content of an image with the style of another image. Examples of the styles can be seen [here][4]. 10 | 11 | Open Neural Network Exchange (ONNX) is an open standard format for representing machine learning models. ONNX is supported by a community of partners who have implemented it in many frameworks and tools. 12 | 13 | The ONNX Model Zoo is a collection of pre-trained, state-of-the-art models in the ONNX format contributed by community members like you. See https://github.com/onnx/models for more. 14 | 15 | You should be able to use other ONNX models in your function by rewriting the preprocess/postprocess code and wiring the expected inputs and outputs. 16 | 17 | ## Sample run 18 | ![Screenshot](example.png) 19 | This example is probably not going to age well. 
As specified in `function.json`, this function is triggered by an HTTP request.
https://github.com/onnx/models/tree/master/vision/style_transfer/fast_neural_style 95 | [4]: https://github.com/pytorch/examples/tree/master/fast_neural_style#models 96 | -------------------------------------------------------------------------------- /v2functions/local.settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "IsEncrypted": false, 3 | "Values": { 4 | "FUNCTIONS_WORKER_RUNTIME": "python", 5 | "AzureWebJobsStorage": "", 6 | "MyCosmosDBConnectionString": "", 7 | "ComputerVisionSubscription": "", 8 | "ComputerVisionApiEndpoint": "" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /v2functions/local.settings.json.sample: -------------------------------------------------------------------------------- 1 | { 2 | "IsEncrypted": false, 3 | "Values": { 4 | "FUNCTIONS_WORKER_RUNTIME": "python", 5 | "AzureWebJobsStorage": "DefaultEndpointsProtocol=https;AccountName=teststore01;AccountKey=laRarya5VO50MRmoxa3xPA0SFCTH4fYymJLkkhGAhguF1rtjnaI+d+zR9Ha2g1t3sY9kGkM4+ZinvbD0VTHxew==;EndpointSuffix=core.windows.net", 6 | "MyCosmosDBConnectionString": "AccountEndpoint=https://testcosmos01.documents.azure.com:443/;AccountKey=XCU5aM6eTnnlJ9TE4dQJsLML2RIMBYe5jFGEdpRLLvdcr3YLbQ17s4ZOEQjxqU6qGVTfd37PeZKKD0YEXIJ08g==;", 7 | "ComputerVisionSubscription": "aatest72a28e47cd9ce2f984c0ee8sse", 8 | "ComputerVisionApiEndpoint": "https://westus.api.cognitive.microsoft.com/", 9 | "MyStorageConnectionString": "DefaultEndpointsProtocol=https;AccountName=teststore01;AccountKey=laRarya5VO50MRmoxa3xPA0SFCTH4fYymJLkkhGAhguF1rtjnaI+d+zR9Ha2g1t3sY9kGkM4+ZinvbD0VTHxew==;EndpointSuffix=core.windows.net" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /v2functions/queue-trigger-blob-in-out-binding/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import azure.functions as func 3 | 
4 | def _rot13(c): 5 | if 'A' <= c and c <= 'Z': 6 | return chr((ord(c) - ord('A') + 13) % 26 + ord('A')) 7 | if 'a' <= c and c <= 'z': 8 | return chr((ord(c) - ord('a') + 13) % 26 + ord('a')) 9 | return c 10 | 11 | def process_rot13(s): 12 | g = (_rot13(c) for c in s) 13 | return ''.join(g) 14 | 15 | #def main(myitem: func.QueueMessage, inputblob: func.InputStream) -> None: 16 | def main(myitem: func.QueueMessage, 17 | inputblob: func.InputStream, outputblob: func.Out[str]) -> None: 18 | # 1. Print 'Item name' from Queue Message 19 | logging.info('Queue item id:%s, body:%s, expiration_time:%s', 20 | myitem.id, myitem.get_body().decode('utf-8'), myitem.expiration_time) 21 | 22 | # 2. Read Blob file (the File name is the same as the item name from Queue message ) 23 | clear_text = inputblob.read().decode('utf-8') 24 | logging.info("Clear text:%s'", clear_text) 25 | 26 | # 3. Process: Encrypt text with ROT13 encryption 27 | encrypted_text= process_rot13(clear_text) 28 | logging.info("Encrypted text:%s", encrypted_text) 29 | 30 | # 4. 
def main(myitem: func.QueueMessage,
         inputblob: func.InputStream, outputblob: func.Out[str]) -> None:
    """Queue-triggered function: ROT13-encode a blob named by the queue message.

    Bindings (see function.json):
      myitem     -- queue trigger on 'itemsqueue'; the message body is a blob name
      inputblob  -- blob input bound to inputitems/{queueTrigger}
      outputblob -- blob output bound to outputitems/{queueTrigger}
    """
    # 1. Log metadata of the triggering queue message.
    logging.info('Queue item id:%s, body:%s, expiration_time:%s',
                 myitem.id, myitem.get_body().decode('utf-8'), myitem.expiration_time)

    # 2. Read the blob file (its name comes from the queue message via {queueTrigger}).
    clear_text = inputblob.read().decode('utf-8')
    # Fix: the original format string had a stray trailing apostrophe ("Clear text:%s'").
    logging.info("Clear text:%s", clear_text)

    # 3. Process: encrypt the text with ROT13.
    encrypted_text = process_rot13(clear_text)
    logging.info("Encrypted text:%s", encrypted_text)

    # 4. Write the encrypted text to the output blob.
    outputblob.set(encrypted_text)
    logging.info("Done storing encrypted text")
As specified in `function.json`, you need an Azure Storage account for the queue trigger and for the blob input & output bindings.
def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]):
    """Relay a Service Bus queue message from the input queue to the output queue.

    The message body is decoded as UTF-8, logged, and forwarded unchanged
    through the Service Bus output binding.
    """
    payload = msgIn.get_body().decode('utf-8')
    logging.info(f'Processed Service Bus Queue message: {payload}')
    msgOut.set(payload)
"serviceBusTrigger", 7 | "direction": "in", 8 | "queueName": "inqueue", 9 | "connection": "ServiceBusNamespaceConnectionString" 10 | }, 11 | { 12 | "name": "msgOut", 13 | "type": "serviceBus", 14 | "direction": "out", 15 | "connection": "ServiceBusNamespaceConnectionString", 16 | "queueName": "outqueue" 17 | } 18 | ] 19 | } -------------------------------------------------------------------------------- /v2functions/sbqueue-trigger-sbqueue-out-binding/readme.md: -------------------------------------------------------------------------------- 1 | # sbqueue-trigger-sbqueue-out-binding (Python) 2 | 3 | | Sample | Description | Trigger | In Bindings | Out Bindings 4 | | ------------- | ------------- | ------------- | ----------- | ----------- | 5 | | `sbqueue-trigger-sbqueue-out-binding` | Azure Functions Service Bus Queue Trigger Python Sample. The function demonstrates reading from a Service Bus queue and placing a new message into a Service Bus queue. | Service Bus Queue | None | Service Bus Queue | 6 | 7 | ## Configurations 8 | 9 | `function.json`: 10 | 11 | ```json 12 | { 13 | "scriptFile": "__init__.py", 14 | "bindings": [ 15 | { 16 | "name": "msgIn", 17 | "type": "serviceBusTrigger", 18 | "direction": "in", 19 | "queueName": "inqueue", 20 | "connection": "ServiceBusNamespaceConnectionString" 21 | }, 22 | { 23 | "name": "msgOut", 24 | "type": "serviceBus", 25 | "direction": "out", 26 | "connection": "ServiceBusNamespaceConnectionString", 27 | "queueName": "outqueue" 28 | } 29 | ] 30 | } 31 | ``` 32 | 33 | ### Create the Service Bus Queues 34 | 35 | Create two Service Bus Queues in the namespace used in `ServiceBusNamespaceConnectionString`, `inqueue` and `outqueue`: 36 | 37 | ```sh 38 | az servicebus queue create --name inqueue \ 39 | --resource-group $RESOURCE_GROUP \ 40 | --namespace-name $SERVICEBUS_NAMESPACE 41 | 42 | az servicebus queue create --name outqueue \ 43 | --resource-group $RESOURCE_GROUP \ 44 | --namespace-name $SERVICEBUS_NAMESPACE 45 | ``` 46 | 
RSS_FEED_URL = "https://kubernetes.io/feed.xml"

def get_feed(max_entries=5):
    """Fetch the RSS feed and return up to `max_entries` of the latest entries.

    Each entry is reduced to a dict with:
      id    -- SHA-1 hex digest of the entry link (stable document id, so
               re-crawling the same post produces the same id)
      title -- entry title
      date  -- the entry's 'updated' timestamp as published in the feed
    """
    feed = feedparser.parse(RSS_FEED_URL)
    retdocs = []
    for entry in feed['entries'][:max_entries]:
        retdocs.append({
            "id": hashlib.sha1(entry['link'].encode('utf-8')).hexdigest(),
            "title": entry['title'],
            "date": entry['updated'],
        })
    return retdocs

def main(mytimer: func.TimerRequest, outdoc: func.Out[func.Document]):
    """Timer-triggered function: crawl the blog RSS feed and store the results
    in Cosmos DB via the output binding (see function.json for the schedule).
    """
    # timezone-aware "now"; datetime.utcnow() is deprecated and its replace()
    # round-trip produced the identical ISO-8601 string.
    utc_timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()
    if mytimer.past_due:
        logging.info('The timer is past due!')
    logging.info('Python timer trigger function ran at %s', utc_timestamp)

    try:
        # Get blog feed entries.
        outdata = {'items': get_feed()}

        # Store the document using the Cosmos DB output binding.
        outdoc.set(func.Document.from_json(json.dumps(outdata)))
    except Exception:
        # logging.exception records the traceback, unlike the original
        # two separate logging.error calls.
        logging.exception('Failed to fetch the feed or store the documents')
Note that the Azure Functions (NCRONTAB) cron expression has six fields — `{second} {minute} {hour} {day} {month} {day-of-week}` — so `0 1 * * * *` actually fires at one minute past every hour; to run once a day at 1:00 am use `0 0 1 * * *`. The following are sample schedule patterns:

```txt
# Run every 5 minutes
0 */5 * * * *

# Run every 6 hours at 10 mins past the hour
0 10 */6 * * *

# Run at 1:00 am
0 0 1 * * *

# Run at 5:31 pm
0 31 17 * * *
```
# Create a container for leases
# 'leases' needs to be a single-partition collection
141 | { 142 | "id": "6c24aea9a7d60f8697c3cfe5328e1c86196facbc", 143 | "title": "Blog: APIServer dry-run and kubectl diff", 144 | "date": "Mon, 14 Jan 2019 00:00:00 +0000" 145 | }, 146 | { 147 | "id": "00c9181702e3e237bb150d248ffcc27796f8774f", 148 | "title": "Blog: Kubernetes Federation Evolution", 149 | "date": "Wed, 12 Dec 2018 00:00:00 +0000" 150 | }, 151 | { 152 | "id": "fe818ab1fb07e0a95e2a0e7bf754c15ab95bf6b8", 153 | "title": "Blog: etcd: Current status and future roadmap", 154 | "date": "Tue, 11 Dec 2018 00:00:00 +0000" 155 | } 156 | ], 157 | "id": "ab8f4110-0692-4875-9d0f-b864cca603c6", 158 | "_rid": "dCoKAPwUdioBAAAAAAAAAA==", 159 | "_self": "dbs/dCoKAA==/colls/dCoKAPwUdio=/docs/dCoKAPwUdioBAAAAAAAAAA==/", 160 | "_etag": "\"0d004e82-0000-0000-0000-5c544bf70000\"", 161 | "_attachments": "attachments/", 162 | "_ts": 1549028343 163 | } 164 | ``` 165 | --------------------------------------------------------------------------------