├── tests ├── utils │ ├── __init__.py │ └── testutils.py ├── __init__.py ├── decorators │ ├── test_function_name.py │ ├── test_warmup.py │ ├── test_timer.py │ ├── test_retry_policy.py │ ├── test_queue.py │ ├── test_http.py │ ├── test_table.py │ ├── test_eventhub.py │ ├── test_sql.py │ ├── test_mysql.py │ ├── test_generic.py │ ├── test_eventgrid.py │ ├── test_blob.py │ ├── test_core.py │ └── test_kafka.py ├── test_warmup.py ├── test_timer.py ├── test_code_quality.py ├── test_mcp.py └── test_eventgrid.py ├── azure ├── functions │ ├── _thirdparty │ │ └── __init__.py │ ├── py.typed │ ├── extension │ │ ├── function_extension_exception.py │ │ ├── __init__.py │ │ ├── extension_hook_meta.py │ │ ├── func_extension_hooks.py │ │ ├── app_extension_hooks.py │ │ ├── extension_scope.py │ │ ├── app_extension_base.py │ │ └── func_extension_base.py │ ├── decorators │ │ ├── function_name.py │ │ ├── warmup.py │ │ ├── timer.py │ │ ├── __init__.py │ │ ├── retry_policy.py │ │ ├── queue.py │ │ ├── generic.py │ │ ├── eventhub.py │ │ ├── http.py │ │ ├── constants.py │ │ ├── table.py │ │ ├── eventgrid.py │ │ ├── blob.py │ │ ├── sql.py │ │ ├── mysql.py │ │ ├── servicebus.py │ │ ├── mcp.py │ │ ├── dapr.py │ │ └── utils.py │ ├── warmup.py │ ├── _kafka.py │ ├── _cosmosdb.py │ ├── timer.py │ ├── _jsonutils.py │ ├── _sql.py │ ├── _mysql.py │ ├── mcp.py │ ├── sql.py │ ├── cosmosdb.py │ ├── mysql.py │ ├── _queue.py │ ├── _eventgrid.py │ ├── _eventhub.py │ ├── _utils.py │ ├── __init__.py │ ├── _durable_functions.py │ ├── eventgrid.py │ ├── blob.py │ ├── queue.py │ ├── http.py │ └── durable_functions.py └── __init__.py ├── MANIFEST.in ├── .github ├── labeler.yml ├── workflows │ ├── pr_title_enforcer.yml │ ├── ISSUE_TEMPLATE │ │ └── bug_report.md │ └── label.yml └── ISSUE_TEMPLATE │ └── the-issues-in-this-repository-are-no-longer-monitored.md ├── .azuredevops └── dependabot.yml ├── .flake8 ├── eng ├── ci │ ├── code-mirror.yml │ ├── library-release.yml │ ├── official-build.yml │ └── public-build.yml └── 
templates │ ├── jobs │ ├── ci-tests.yml │ └── build.yml │ └── official │ └── jobs │ └── build-artifacts.yml ├── CODEOWNERS ├── setup.cfg ├── .coveragerc ├── LICENSE ├── pyproject.toml ├── .gitignore ├── SECURITY.md └── README.md /tests/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /azure/functions/_thirdparty/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /azure/functions/py.typed: -------------------------------------------------------------------------------- 1 | # Marker file for PEP 561. The azure.functions package uses inline types. 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include azure *.py *.pyi 2 | recursive-include tests *.py 3 | include LICENSE README.md 4 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | # Add 'SDKBreakingChange' label if any changes detected within __init__.py file 2 | SDKBreakingChange: 3 | - '**/__init__.py' 4 | -------------------------------------------------------------------------------- /.azuredevops/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Mirrored repository. We use dependabot via GitHub, not Azure DevOps. 
class FunctionExtensionException(Exception):
    """Raised for errors originating from the Azure Functions Python
    Worker extension framework.
    """
-------------------------------------------------------------------------------- /azure/functions/extension/__init__.py: -------------------------------------------------------------------------------- 1 | from .extension_meta import ExtensionMeta 2 | from .function_extension_exception import FunctionExtensionException 3 | from .app_extension_base import AppExtensionBase 4 | from .func_extension_base import FuncExtensionBase 5 | 6 | __all__ = [ 7 | 'ExtensionMeta', 8 | 'FunctionExtensionException', 9 | 'AppExtensionBase', 10 | 'FuncExtensionBase' 11 | ] 12 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/about-codeowners/ 2 | # for more info about CODEOWNERS file 3 | # 4 | # It uses the same pattern rule for gitignore file 5 | # https://git-scm.com/docs/gitignore#_pattern_format 6 | # 7 | 8 | # 9 | # AZURE FUNCTIONS TEAM 10 | # For all file changes, github would automatically include the following people in the PRs. 
11 | # 12 | 13 | * @vrdmr @gavin-aguiar @hallvictoria 14 | -------------------------------------------------------------------------------- /.github/workflows/pr_title_enforcer.yml: -------------------------------------------------------------------------------- 1 | name: "PR Title Enforcer" 2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | permissions: 11 | pull-requests: read 12 | 13 | jobs: 14 | main: 15 | name: Validate PR title 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: amannn/action-semantic-pull-request@v5 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "[BUG] Bug Title" 5 | --- 6 | 7 | 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | addopts = --capture=no --assert=plain --strict --tb native 3 | testpaths = tests 4 | 5 | [mypy] 6 | python_version = 3.6 7 | check_untyped_defs = True 8 | warn_redundant_casts = True 9 | warn_unused_ignores = True 10 | warn_unused_configs = True 11 | strict_optional = True 12 | warn_return_any = True 13 | disallow_subclassing_any = True 14 | ignore_missing_imports = True 15 | 16 | [mypy-azure.functions._thirdparty.*] 17 | ignore_errors = True 18 | -------------------------------------------------------------------------------- /azure/functions/decorators/function_name.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
class FunctionName(Setting):
    """Setting that assigns an explicit name to the decorated function.

    :param function_name: The name the function is registered under.
        Extra **kwargs are accepted for forward compatibility —
        presumably surfaced by the binding machinery's dict
        representation, as the decorator tests suggest; confirm before
        relying on it.
    """

    def __init__(self, function_name: str,
                 **kwargs):
        # Record the requested name, then register under the fixed
        # "function_name" setting key.
        self.function_name = function_name
        super().__init__(setting_name=FUNCTION_NAME)
class ExtensionHookMeta(NamedTuple):
    """Metadata describing a single registered life-cycle hook.

    ext_name holds the class name of the extension that registered the
    hook; ext_impl holds the callable the worker invokes when the hook
    fires.
    """
    ext_name: str
    ext_impl: Callable

    # When adding more fields, make sure they have default values (e.g.
    # ext_new_field: Optional[str] = None) so that code constructing
    # this tuple positionally elsewhere keeps working.
class WarmUpTrigger(Trigger):
    """Trigger binding for the warmupTrigger binding type.

    :param name: Binding name used inside the function.
    :param data_type: Optional data type hint for the binding.

    NOTE(review): extra **kwargs are accepted but not forwarded here;
    presumably the base Trigger machinery surfaces them (camelCased) in
    the dict representation, as the decorator tests assert — confirm.
    """

    @staticmethod
    def get_binding_name() -> str:
        # Binding type string emitted in the trigger's dict representation.
        return WARMUP_TRIGGER

    def __init__(self,
                 name: str,
                 data_type: Optional[DataType] = None,
                 **kwargs) -> None:
        super().__init__(name=name, data_type=data_type)
class TestFunctionName(unittest.TestCase):
    """Unit tests for the FunctionName setting decorator."""

    def test_function_name_setting_creation(self):
        # Renamed from test_retry_policy_setting_creation: the old name
        # was copy-pasted from the retry-policy tests and misdescribed
        # what is under test here.
        function_name = FunctionName(function_name="TestFunctionName")

        self.assertEqual(function_name.get_setting_name(), "function_name")
        self.assertEqual(function_name.get_dict_repr(),
                         {'setting_name': 'function_name',
                          'function_name': 'TestFunctionName'})
class AppExtensionHooks(NamedTuple):
    """The definition of which type of global hooks are supported in SDK.

    ExtensionMeta looks up the AppExtension life-cycle stages from here;
    each field holds the list of hooks registered for that stage,
    applied at the application (all-triggers) level.
    """
    # The default value ([] empty list) is not being set here intentionally
    # since it is impacted by a Python bug https://bugs.python.org/issue33077.
    post_function_load_app_level: List[ExtensionHookMeta]
    pre_invocation_app_level: List[ExtensionHookMeta]
    post_invocation_app_level: List[ExtensionHookMeta]
class AbstractKafkaEvent(abc.ABC):
    """Abstract interface for a single event received from Kafka.

    Concrete implementations are supplied by the worker; consumers read
    the payload via get_body() and metadata via the read-only
    properties. All metadata properties are Optional — None when the
    value is not available.
    """

    @abc.abstractmethod
    def get_body(self) -> bytes:
        """Return the raw message payload as bytes."""
        pass

    @property
    @abc.abstractmethod
    def key(self) -> typing.Optional[str]:
        """Message key, or None if the message was not keyed."""
        pass

    @property
    @abc.abstractmethod
    def offset(self) -> typing.Optional[int]:
        """Offset of the message within its partition, if available."""
        pass

    @property
    @abc.abstractmethod
    def partition(self) -> typing.Optional[int]:
        """Partition the message was read from, if available."""
        pass

    @property
    @abc.abstractmethod
    def topic(self) -> typing.Optional[str]:
        """Topic the message was published to, if available."""
        pass

    @property
    @abc.abstractmethod
    def timestamp(self) -> typing.Optional[str]:
        """Message timestamp as a string, if provided."""
        pass
class TimerTrigger(Trigger):
    """Trigger binding for the timerTrigger binding type.

    :param name: Binding name used inside the function.
    :param schedule: Schedule expression (presumably NCRONTAB format —
        confirm against the Azure Functions timer documentation).
    :param run_on_startup: Whether the function also fires when the
        host starts.
    :param use_monitor: Whether schedule occurrences are monitored —
        presumably maps to the host's ``useMonitor`` setting; confirm.
    :param data_type: Optional data type hint for the binding.
    """

    @staticmethod
    def get_binding_name() -> str:
        # Binding type string emitted in the trigger's dict representation.
        return TIMER_TRIGGER

    def __init__(self,
                 name: str,
                 schedule: str,
                 run_on_startup: Optional[bool] = None,
                 use_monitor: Optional[bool] = None,
                 data_type: Optional[DataType] = None,
                 **kwargs) -> None:
        self.schedule = schedule
        self.run_on_startup = run_on_startup
        self.use_monitor = use_monitor
        super().__init__(name=name, data_type=data_type)
3 | from .core import Cardinality, AccessRights 4 | from .function_app import FunctionApp, Function, DecoratorApi, DataType, \ 5 | AuthLevel, Blueprint, ExternalHttpFunctionApp, AsgiFunctionApp, \ 6 | WsgiFunctionApp, FunctionRegister, TriggerApi, BindingApi, \ 7 | SettingsApi, BlobSource, McpPropertyType 8 | from .http import HttpMethod 9 | 10 | __all__ = [ 11 | 'FunctionApp', 12 | 'Function', 13 | 'FunctionRegister', 14 | 'DecoratorApi', 15 | 'TriggerApi', 16 | 'BindingApi', 17 | 'SettingsApi', 18 | 'Blueprint', 19 | 'ExternalHttpFunctionApp', 20 | 'AsgiFunctionApp', 21 | 'WsgiFunctionApp', 22 | 'DataType', 23 | 'AuthLevel', 24 | 'Cardinality', 25 | 'AccessRights', 26 | 'HttpMethod', 27 | 'BlobSource', 28 | 'McpPropertyType' 29 | ] 30 | -------------------------------------------------------------------------------- /tests/test_warmup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
class RetryPolicy(Setting):
    """Function-level retry policy setting.

    :param strategy: Retry strategy name — presumably ``fixed_delay``
        or ``exponential_backoff``; confirm accepted values against the
        host documentation.
    :param max_retry_count: Maximum number of retries. String-typed,
        matching the string-valued host settings this maps onto.
    :param delay_interval: Delay between retries (fixed-delay strategy).
    :param minimum_interval: Minimum backoff (exponential strategy).
    :param maximum_interval: Maximum backoff (exponential strategy).
    """

    def __init__(self,
                 strategy: str,
                 max_retry_count: str,
                 delay_interval: Optional[str] = None,
                 minimum_interval: Optional[str] = None,
                 maximum_interval: Optional[str] = None,
                 **kwargs):
        self.strategy = strategy
        self.max_retry_count = max_retry_count
        self.delay_interval = delay_interval
        self.minimum_interval = minimum_interval
        self.maximum_interval = maximum_interval
        super().__init__(setting_name=RETRY_POLICY)
class TestWarmUp(unittest.TestCase):
    """Tests for the WarmUpTrigger decorator binding."""

    def test_warmup_trigger_valid_creation(self):
        # Pass an arbitrary extra kwarg to verify it is surfaced,
        # camelCased, in the dict representation.
        trigger = WarmUpTrigger(
            name="req", data_type=DataType.UNDEFINED, dummy_field="dummy")

        expected_repr = {
            "type": WARMUP_TRIGGER,
            "direction": BindingDirection.IN,
            "dummyField": "dummy",
            "name": "req",
            "dataType": DataType.UNDEFINED,
        }
        self.assertEqual(trigger.get_binding_name(), "warmupTrigger")
        self.assertEqual(trigger.get_dict_repr(), expected_repr)
32 | displayName: 'Run vulnerability scan' 33 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | # Regexes for lines to exclude from consideration 3 | exclude_lines = 4 | # Have to re-enable the standard pragma 5 | pragma: no cover 6 | def __repr__ 7 | 8 | # Don't complain about missing debug-only code: 9 | if self\.debug 10 | 11 | # Don't complain if tests don't hit defensive assertion code: 12 | raise AssertionError 13 | raise NotImplementedError 14 | 15 | # Don't complain if non-runnable code isn't run: 16 | if 0: 17 | if False: 18 | if __name__ == .__main__.: 19 | 20 | # Not counting some extra lines which don't need to be counted. 21 | print 22 | def setUp 23 | def test_ 24 | def suite 25 | 26 | ignore_errors = True 27 | 28 | omit = 29 | */build/* 30 | */doc/* 31 | */examples/* 32 | */postprocessing/* 33 | */test/* 34 | */setup.py 35 | setup.py 36 | */site-packages/* 37 | azure/functions/_thirdparty/* 38 | */venv/* 39 | */.venv/* 40 | */.env*/* 41 | */.vscode/* 42 | azure/functions/_abc.py -------------------------------------------------------------------------------- /eng/templates/jobs/build.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: "Build" 3 | displayName: 'Build Python SDK' 4 | 5 | strategy: 6 | matrix: 7 | Python310: 8 | PYTHON_VERSION: '3.10' 9 | Python311: 10 | PYTHON_VERSION: '3.11' 11 | Python312: 12 | PYTHON_VERSION: '3.12' 13 | Python313: 14 | PYTHON_VERSION: '3.13' 15 | Python314: 16 | PYTHON_VERSION: '3.14' 17 | 18 | steps: 19 | - task: UsePythonVersion@0 20 | inputs: 21 | versionSpec: $(PYTHON_VERSION) 22 | - bash: | 23 | python --version 24 | displayName: 'Check python version' 25 | - bash: | 26 | python -m pip install -U pip 27 | python -m pip install build 28 | if [[ $(PYTHON_VERSION) == "3.7" ]]; then 29 | python -m pip install 
importlib_metadata 30 | fi 31 | python -m build 32 | displayName: 'Build Python SDK for $(PYTHON_VERSION)' 33 | - bash: | 34 | pip install pip-audit 35 | pip-audit . 36 | displayName: 'Run vulnerability scan' 37 | condition: ne(variables['PYTHON_VERSION'], '3.7') 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. All rights reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /eng/ci/official-build.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | batch: true 3 | branches: 4 | include: 5 | - dev 6 | - release/* 7 | 8 | # CI only, does not trigger on PRs. 
9 | pr: none 10 | 11 | schedules: 12 | - cron: '0 0 * * MON' 13 | displayName: At 12:00 AM, only on Monday 14 | branches: 15 | include: 16 | - dev 17 | always: true 18 | 19 | resources: 20 | repositories: 21 | - repository: 1es 22 | type: git 23 | name: 1ESPipelineTemplates/1ESPipelineTemplates 24 | ref: refs/tags/release 25 | - repository: eng 26 | type: git 27 | name: engineering 28 | ref: refs/tags/release 29 | 30 | variables: 31 | - template: ci/variables/build.yml@eng 32 | - template: ci/variables/cfs.yml@eng 33 | 34 | extends: 35 | template: v1/1ES.Official.PipelineTemplate.yml@1es 36 | parameters: 37 | pool: 38 | name: 1es-pool-azfunc 39 | image: 1es-windows-2022 40 | os: windows 41 | 42 | stages: 43 | - stage: Build 44 | jobs: 45 | - template: /eng/templates/official/jobs/build-artifacts.yml@self 46 | 47 | - stage: RunTests 48 | dependsOn: Build 49 | jobs: 50 | - template: /eng/templates/jobs/ci-tests.yml@self 51 | -------------------------------------------------------------------------------- /tests/decorators/test_timer.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
class TestTimer(unittest.TestCase):
    """Tests for the TimerTrigger decorator binding."""

    def test_timer_trigger_valid_creation(self):
        # The extra dummy_field kwarg verifies that unknown kwargs are
        # surfaced, camelCased, in the dict representation.
        trigger = TimerTrigger(name="req",
                               schedule="dummy_schedule",
                               data_type=DataType.UNDEFINED,
                               run_on_startup=False,
                               use_monitor=False,
                               dummy_field="dummy")

        self.assertEqual(trigger.get_binding_name(), "timerTrigger")
        self.assertEqual(trigger.get_dict_repr(), {
            "type": TIMER_TRIGGER,
            "direction": BindingDirection.IN,
            'dummyField': 'dummy',
            "name": "req",
            "dataType": DataType.UNDEFINED,
            "schedule": "dummy_schedule",
            "runOnStartup": False,
            "useMonitor": False
        })
class QueueTrigger(Trigger):
    """Storage queue trigger binding (``queueTrigger``).

    :param name: variable name used in function code.
    :param queue_name: name of the storage queue to poll.
    :param connection: app setting holding the storage connection string.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return QUEUE_TRIGGER

    def __init__(self,
                 name: str,
                 queue_name: str,
                 connection: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        # Stash the binding-specific fields, then let the base class
        # record the common ones.
        self.queue_name, self.connection = queue_name, connection
        super().__init__(name=name, data_type=data_type)


class QueueOutput(OutputBinding):
    """Storage queue output binding (``queue``).

    :param name: variable name used in function code.
    :param queue_name: name of the storage queue written to.
    :param connection: app setting holding the storage connection string.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return QUEUE

    def __init__(self,
                 name: str,
                 queue_name: str,
                 connection: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.queue_name, self.connection = queue_name, connection
        super().__init__(name=name, data_type=data_type)
class GenericInputBinding(InputBinding):
    """Generic input binding whose binding type is supplied explicitly
    via ``type`` rather than implied by the class.

    :param name: variable name used in function code.
    :param type: binding type name as understood by the Functions host.
        (Shadows the builtin ``type``; the name is part of the public
        keyword API and cannot be changed.)
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name():
        # No fixed binding name: generic bindings carry their type in
        # the ``type`` constructor argument, so this returns None.
        pass

    def __init__(self,
                 name: str,
                 type: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        super().__init__(name=name, data_type=data_type, type=type)


class GenericOutputBinding(OutputBinding):
    """Generic output binding; see :class:`GenericInputBinding`."""

    @staticmethod
    def get_binding_name():
        # Intentionally None: the explicit ``type`` argument wins.
        pass

    def __init__(self,
                 name: str,
                 type: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        super().__init__(name=name, data_type=data_type, type=type)


class GenericTrigger(Trigger):
    """Generic trigger binding; see :class:`GenericInputBinding`."""

    @staticmethod
    def get_binding_name():
        # Intentionally None: the explicit ``type`` argument wins.
        pass

    def __init__(self,
                 name: str,
                 type: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        super().__init__(name=name, data_type=data_type, type=type)
class TestRetryPolicy(unittest.TestCase):
    """Unit tests for the RetryPolicy function-level setting."""

    def test_retry_policy_setting_creation(self):
        """Dict representation for both the fixed-delay and the
        exponential-backoff retry strategies."""
        # Fixed strategy uses a single delay_interval between retries.
        retry_policy = RetryPolicy(max_retry_count="1",
                                   strategy="fixed",
                                   delay_interval="5")

        self.assertEqual(retry_policy.get_setting_name(), "retry_policy")
        self.assertEqual(retry_policy.get_dict_repr(),
                         {'setting_name': 'retry_policy',
                          'strategy': 'fixed',
                          'max_retry_count': '1',
                          'delay_interval': '5'})

        # Exponential strategy uses minimum/maximum intervals instead.
        retry_policy = RetryPolicy(max_retry_count="1",
                                   strategy="exponential",
                                   minimum_interval="5",
                                   maximum_interval="10")
        self.assertEqual(retry_policy.get_dict_repr(),
                         {'setting_name': 'retry_policy',
                          'strategy': 'exponential',
                          'minimum_interval': '5',
                          'max_retry_count': '1',
                          'maximum_interval': '10'})
class EventHubTrigger(Trigger):
    """Event Hub trigger binding (``eventHubTrigger``).

    :param name: variable name used in function code.
    :param connection: app setting with the Event Hub connection string.
    :param event_hub_name: name of the event hub to read from.
    :param data_type: how the payload is passed to the function.
    :param cardinality: per-invocation batching behaviour
        (see the Cardinality enum).
    :param consumer_group: consumer group to read from, if any.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return EVENT_HUB_TRIGGER

    def __init__(self,
                 name: str,
                 connection: str,
                 event_hub_name: str,
                 data_type: Optional[DataType] = None,
                 cardinality: Optional[Cardinality] = None,
                 consumer_group: Optional[str] = None,
                 **kwargs):
        self.connection = connection
        self.event_hub_name = event_hub_name
        self.cardinality = cardinality
        self.consumer_group = consumer_group
        super().__init__(name=name, data_type=data_type)


class EventHubOutput(OutputBinding):
    """Event Hub output binding (``eventHub``).

    :param name: variable name used in function code.
    :param connection: app setting with the Event Hub connection string.
    :param event_hub_name: name of the event hub written to.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return EVENT_HUB

    def __init__(self,
                 name: str,
                 connection: str,
                 event_hub_name: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.connection = connection
        self.event_hub_name = event_hub_name
        super().__init__(name=name, data_type=data_type)
class CollectionString:
    """Mock payload for a ``collection_string`` meta.Datum in tests.

    Typical usage:
        new_datum = meta.Datum(type='collection_string',
                               value=CollectionString(['a', 'b']))
    """

    def __init__(self, data: List[str]):
        # Attribute name mirrors the protobuf field the worker reads.
        self.string = data


class CollectionSint64:
    """Mock payload for a ``collection_sint64`` meta.Datum in tests.

    Typical usage:
        new_datum = meta.Datum(type='collection_sint64',
                               value=CollectionSint64([1, 2]))
    """

    def __init__(self, data: List[int]):
        self.sint64 = data


def assert_json(self, func, expected_dict):
    """Assert that ``str(func)`` parses to JSON equal to expected_dict.

    Both sides are normalized via a sorted-key dump with the
    StringifyEnumJsonEncoder so enum members compare by their string
    value rather than identity.
    """
    encoder = StringifyEnumJsonEncoder
    actual = json.dumps(json.loads(str(func)), sort_keys=True, cls=encoder)
    expected = json.dumps(expected_dict, sort_keys=True, cls=encoder)
    self.assertEqual(actual, expected)
class HttpMethod(StringifyEnum):
    """All http methods Azure Python function supports."""
    GET = "GET"
    POST = "POST"
    DELETE = "DELETE"
    HEAD = "HEAD"
    PATCH = "PATCH"
    PUT = "PUT"
    OPTIONS = "OPTIONS"


class HttpTrigger(Trigger):
    """HTTP trigger binding (``httpTrigger``).

    :param name: variable name used in function code.
    :param methods: HTTP methods the endpoint accepts; None means the
        host default set.
    :param data_type: how the payload is passed to the function.
    :param auth_level: authorization level required to invoke.
    :param route: custom route template, if any.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return HTTP_TRIGGER

    def __init__(self,
                 name: str,
                 methods: Optional[Iterable[HttpMethod]] = None,
                 data_type: Optional[DataType] = None,
                 auth_level: Optional[AuthLevel] = None,
                 route: Optional[str] = None,
                 **kwargs) -> None:
        self.auth_level = auth_level
        self.methods = methods
        self.route = route
        super().__init__(name=name, data_type=data_type)


class HttpOutput(OutputBinding):
    """HTTP output binding (``http``) carrying the response.

    :param name: variable name used in function code.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return HTTP_OUTPUT

    def __init__(self,
                 name: str,
                 data_type: Optional[DataType] = None,
                 **kwargs) -> None:
        super().__init__(name=name, data_type=data_type)
14 | """ 15 | 16 | @classmethod 17 | def from_json(cls, json_data: str) -> 'Document': 18 | """Create a Document from a JSON string.""" 19 | return cls.from_dict(json.loads(json_data)) 20 | 21 | @classmethod 22 | def from_dict(cls, dct: dict) -> 'Document': 23 | """Create a Document from a dict object.""" 24 | return cls({k: v for k, v in dct.items()}) 25 | 26 | def to_json(self) -> str: 27 | """Return the JSON representation of the document.""" 28 | return json.dumps(dict(self)) 29 | 30 | def to_dict(self) -> dict: 31 | """Return the document as a dict - directly using self would also work 32 | as Document is ``UserDict`` subclass and behave like dict""" 33 | return dict(self) 34 | 35 | def __getitem__(self, key): 36 | return collections.UserDict.__getitem__(self, key) 37 | 38 | def __setitem__(self, key, value): 39 | return collections.UserDict.__setitem__(self, key, value) 40 | 41 | def __repr__(self) -> str: 42 | return ( 43 | f'' 44 | ) 45 | 46 | 47 | class DocumentList(_abc.DocumentList, collections.UserList): 48 | "A ``UserList`` subclass containing a list of :class:`~Document` objects" 49 | pass 50 | -------------------------------------------------------------------------------- /azure/functions/timer.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import typing 5 | 6 | from azure.functions import _abc as azf_abc 7 | from . 
class TimerRequest(azf_abc.TimerRequest):
    """Concrete timer-trigger payload handed to the function."""

    def __init__(self, *, past_due: bool = False,
                 schedule_status: typing.Optional[dict] = None,
                 schedule: typing.Optional[dict] = None) -> None:
        self.__past_due = past_due
        # Normalize falsy values to empty dicts so the properties
        # below never return None.
        self.__schedule_status = schedule_status if schedule_status else {}
        self.__schedule = schedule if schedule else {}

    @property
    def past_due(self) -> bool:
        """Whether the timer fired later than its scheduled time."""
        return self.__past_due

    @property
    def schedule_status(self) -> dict:
        """Schedule status info as provided by the host (may be empty)."""
        return self.__schedule_status

    @property
    def schedule(self) -> dict:
        """Schedule settings as provided by the host (may be empty)."""
        return self.__schedule


class TimerRequestConverter(meta.InConverter,
                            binding='timerTrigger', trigger=True):
    """Decodes the host's timer JSON payload into a TimerRequest."""

    @classmethod
    def check_input_type_annotation(cls, pytype: type) -> bool:
        return issubclass(pytype, azf_abc.TimerRequest)

    @classmethod
    def decode(cls, data: meta.Datum, *, trigger_metadata) -> typing.Any:
        # Timer payloads are only handled when delivered as JSON.
        if data.type != 'json':
            raise NotImplementedError

        info = json.loads(data.value)

        # Keys use the host's Pascal-cased names.
        return TimerRequest(
            past_due=info.get('IsPastDue', False),
            schedule_status=info.get('ScheduleStatus', {}),
            schedule=info.get('Schedule', {}))
# binding types

# Core storage / messaging / HTTP / timer bindings
COSMOS_DB = "cosmosDB"
COSMOS_DB_TRIGGER = "cosmosDBTrigger"
EVENT_HUB_TRIGGER = "eventHubTrigger"
EVENT_HUB = "eventHub"
HTTP_TRIGGER = "httpTrigger"
HTTP_OUTPUT = "http"
KAFKA = "kafka"
KAFKA_TRIGGER = "kafkaTrigger"
QUEUE = "queue"
QUEUE_TRIGGER = "queueTrigger"
SERVICE_BUS = "serviceBus"
SERVICE_BUS_TRIGGER = "serviceBusTrigger"
TIMER_TRIGGER = "timerTrigger"
WARMUP_TRIGGER = "warmupTrigger"
BLOB_TRIGGER = "blobTrigger"
BLOB = "blob"
EVENT_GRID_TRIGGER = "eventGridTrigger"
EVENT_GRID = "eventGrid"
TABLE = "table"
SQL = "sql"
SQL_TRIGGER = "sqlTrigger"
# Dapr bindings
DAPR_SERVICE_INVOCATION_TRIGGER = "daprServiceInvocationTrigger"
DAPR_BINDING_TRIGGER = "daprBindingTrigger"
DAPR_TOPIC_TRIGGER = "daprTopicTrigger"
DAPR_STATE = "daprState"
DAPR_SECRET = "daprSecret"
DAPR_PUBLISH = "daprPublish"
DAPR_INVOKE = "daprInvoke"
DAPR_BINDING = "daprBinding"
# Durable Functions bindings
ORCHESTRATION_TRIGGER = "orchestrationTrigger"
ACTIVITY_TRIGGER = "activityTrigger"
ENTITY_TRIGGER = "entityTrigger"
DURABLE_CLIENT = "durableClient"
# Assistant / embeddings / semantic-search bindings
ASSISTANT_SKILL_TRIGGER = "assistantSkillTrigger"
TEXT_COMPLETION = "textCompletion"
ASSISTANT_QUERY = "assistantQuery"
EMBEDDINGS = "embeddings"
EMBEDDINGS_STORE = "embeddingsStore"
ASSISTANT_CREATE = "assistantCreate"
ASSISTANT_POST = "assistantPost"
SEMANTIC_SEARCH = "semanticSearch"
# MySQL and MCP bindings
MYSQL = "mysql"
MYSQL_TRIGGER = "mysqlTrigger"
MCP_TOOL_TRIGGER = "mcpToolTrigger"
requires-python = ">=3.10" 9 | authors = [{ name = "Azure Functions team at Microsoft Corp.", email = "azurefunctions@microsoft.com" }] 10 | description = "Python library for Azure Functions." 11 | readme = "README.md" 12 | license = { text = "MIT License" } 13 | classifiers = [ 14 | 'License :: OSI Approved :: MIT License', 15 | 'Intended Audience :: Developers', 16 | 'Programming Language :: Python :: 3', 17 | 'Programming Language :: Python :: 3.10', 18 | 'Programming Language :: Python :: 3.11', 19 | 'Programming Language :: Python :: 3.12', 20 | 'Programming Language :: Python :: 3.13', 21 | 'Programming Language :: Python :: 3.14', 22 | 'Operating System :: Microsoft :: Windows', 23 | 'Operating System :: POSIX', 24 | 'Operating System :: MacOS :: MacOS X', 25 | 'Environment :: Web Environment', 26 | 'Development Status :: 5 - Production/Stable', 27 | ] 28 | dependencies = [ 29 | 'werkzeug~=3.1.3; python_version >= "3.9"', 30 | 'werkzeug~=3.0.6; python_version == "3.8"', 31 | 'werkzeug; python_version < "3.8"' 32 | ] 33 | [project.optional-dependencies] 34 | dev = [ 35 | 'pytest', 36 | 'pytest-cov', 37 | 'coverage', 38 | 'pytest-instafail', 39 | 'pre-commit', 40 | 'azure-functions-durable', 41 | 'flake8~=4.0.1; python_version < "3.11"', 42 | 'flake8~=7.1.1; python_version >= "3.11"', 43 | 'flake8-docstrings' 44 | ] 45 | 46 | [tool.setuptools.packages.find] 47 | exclude = [ 48 | 'azure', 'eng', 'docs', 'tests*' 49 | ] 50 | 51 | [tool.setuptools.dynamic] 52 | version = {attr = "azure.functions.__version__"} 53 | -------------------------------------------------------------------------------- /azure/functions/decorators/table.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
class TableInput(InputBinding):
    """Table storage input binding (``table``).

    :param name: variable name used in function code.
    :param connection: app setting with the storage connection string.
    :param table_name: name of the table to read.
    :param row_key: row key of a single entity to fetch, if any.
    :param partition_key: partition key to scope the lookup, if any.
    :param take: maximum number of entities to return, if any.
    :param filter: OData filter expression, if any. (Shadows the
        builtin ``filter``; the name is part of the public keyword API
        and cannot be changed.)
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return TABLE

    def __init__(self,
                 name: str,
                 connection: str,
                 table_name: str,
                 row_key: Optional[str] = None,
                 partition_key: Optional[str] = None,
                 take: Optional[int] = None,
                 filter: Optional[str] = None,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        # **kwargs added for consistency with the sibling bindings
        # (queue, eventhub, blob, sql): extra binding fields should be
        # accepted and forwarded instead of raising TypeError.
        self.connection = connection
        self.table_name = table_name
        self.row_key = row_key
        self.partition_key = partition_key
        self.take = take
        self.filter = filter
        super().__init__(name=name, data_type=data_type)


class TableOutput(OutputBinding):
    """Table storage output binding (``table``).

    :param name: variable name used in function code.
    :param connection: app setting with the storage connection string.
    :param table_name: name of the table written to.
    :param row_key: row key for the written entity, if any.
    :param partition_key: partition key for the written entity, if any.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return TABLE

    def __init__(self,
                 name: str,
                 connection: str,
                 table_name: str,
                 row_key: Optional[str] = None,
                 partition_key: Optional[str] = None,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        # **kwargs added for consistency with the sibling bindings.
        self.connection = connection
        self.table_name = table_name
        self.row_key = row_key
        self.partition_key = partition_key
        super().__init__(name=name, data_type=data_type)
class EventGridTrigger(Trigger):
    """Event Grid trigger binding (``eventGridTrigger``).

    :param name: variable name used in function code.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """

    @staticmethod
    def get_binding_name() -> str:
        return EVENT_GRID_TRIGGER

    def __init__(self,
                 name: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        super().__init__(name=name, data_type=data_type)


class EventGridOutput(OutputBinding):
    """Event Grid output binding (``eventGrid``).

    Exactly one connection style must be supplied: either
    ``connection`` alone, or both ``topic_endpoint_uri`` and
    ``topic_key_setting`` together.
    """

    @staticmethod
    def get_binding_name() -> str:
        return EVENT_GRID

    def __init__(self,
                 name: str,
                 topic_endpoint_uri: Optional[str] = None,
                 topic_key_setting: Optional[str] = None,
                 connection: Optional[str] = None,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        uses_connection = connection is not None
        has_any_topic_prop = (topic_endpoint_uri is not None
                              or topic_key_setting is not None)
        has_both_topic_props = (topic_endpoint_uri is not None
                                and topic_key_setting is not None)

        # Reject mixing the two styles, or an incomplete topic pair.
        if ((uses_connection and has_any_topic_prop)
                or (not uses_connection and not has_both_topic_props)):
            raise ValueError(
                "Specify either the 'Connection' property or both "
                "'TopicKeySetting' and 'TopicEndpointUri' properties,"
                " but not both.")

        self.topic_endpoint_uri = topic_endpoint_uri
        self.topic_key_setting = topic_key_setting
        self.connection = connection
        super().__init__(name=name, data_type=data_type)
27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # virtualenv 83 | .env*/ 84 | .venv*/ 85 | venv*/ 86 | env/ 87 | py3env/ 88 | pyvenv*/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | 103 | 104 | .testconfig 105 | .pytest_cache 106 | 107 | # mac osx specific files 108 | .DS_Store 109 | 110 | # PyCharm related files 111 | .idea/ 112 | .idea_modules/ 113 | 114 | # vscode setting 115 | .vscode/ 116 | -------------------------------------------------------------------------------- /tests/decorators/test_queue.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
class TestQueue(unittest.TestCase):
    """Unit tests for the queue trigger and output bindings."""

    def test_queue_trigger_valid_creation(self):
        """QueueTrigger exposes the expected binding name and dict
        representation, including pass-through of unknown kwargs."""
        trigger = QueueTrigger(name="req",
                               queue_name="dummy_queue",
                               connection="dummy_connection",
                               data_type=DataType.UNDEFINED,
                               dummy_field="dummy")

        self.assertEqual(trigger.get_binding_name(), "queueTrigger")
        self.assertEqual(trigger.get_dict_repr(), {
            "type": QUEUE_TRIGGER,
            "direction": BindingDirection.IN,
            'dummyField': 'dummy',
            "name": "req",
            "dataType": DataType.UNDEFINED,
            "queueName": "dummy_queue",
            "connection": "dummy_connection"
        })

    def test_queue_output_valid_creation(self):
        """QueueOutput exposes the expected binding name and dict
        representation, including pass-through of unknown kwargs."""
        output = QueueOutput(name="res",
                             queue_name="dummy_queue_out",
                             connection="dummy_connection",
                             data_type=DataType.UNDEFINED,
                             dummy_field="dummy")

        self.assertEqual(output.get_binding_name(), "queue")
        self.assertEqual(output.get_dict_repr(), {
            "type": QUEUE,
            "direction": BindingDirection.OUT,
            'dummyField': 'dummy',
            "name": "res",
            "dataType": DataType.UNDEFINED,
            "queueName": "dummy_queue_out",
            "connection": "dummy_connection"
        })
class BlobTrigger(Trigger):
    """Blob storage trigger binding (``blobTrigger``).

    :param name: variable name used in function code.
    :param path: container/blob path pattern to watch.
    :param connection: app setting with the storage connection string.
    :param source: trigger source; a BlobSource enum member is
        normalized to its string value, any other value is stored as-is.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """
    def __init__(self,
                 name: str,
                 path: str,
                 connection: str,
                 source: Optional[BlobSource] = None,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.path = path
        self.connection = connection
        # Accept either a BlobSource enum member or a raw string;
        # store the plain string form in both cases.
        if isinstance(source, BlobSource):
            self.source = source.value
        else:
            self.source = source  # type: ignore
        super().__init__(name=name, data_type=data_type)

    @staticmethod
    def get_binding_name() -> str:
        return BLOB_TRIGGER


class BlobInput(InputBinding):
    """Blob storage input binding (``blob``).

    :param name: variable name used in function code.
    :param path: path of the blob to read.
    :param connection: app setting with the storage connection string.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """
    def __init__(self,
                 name: str,
                 path: str,
                 connection: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.path = path
        self.connection = connection
        super().__init__(name=name, data_type=data_type)

    @staticmethod
    def get_binding_name() -> str:
        return BLOB


class BlobOutput(OutputBinding):
    """Blob storage output binding (``blob``).

    :param name: variable name used in function code.
    :param path: path of the blob to write.
    :param connection: app setting with the storage connection string.
    :param data_type: how the payload is passed to the function.
    :param kwargs: additional binding fields forwarded verbatim.
    """
    def __init__(self,
                 name: str,
                 path: str,
                 connection: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.path = path
        self.connection = connection
        super().__init__(name=name, data_type=data_type)

    @staticmethod
    def get_binding_name() -> str:
        return BLOB
try:
    import orjson as _orjson
except ImportError:
    # orjson is optional; fall back to the standard library below.
    _orjson = None

# Standard library is always present
import json as _std_json


class JsonInterface(ABC):
    """Minimal serialize/deserialize contract shared by both backends."""

    @abstractmethod
    def dumps(self, obj: Any) -> str:
        pass

    @abstractmethod
    def loads(self, s: Union[str, bytes, bytearray]) -> Any:
        pass


class OrJsonAdapter(JsonInterface):
    """JsonInterface backed by the third-party orjson library."""

    def __init__(self):
        assert _orjson is not None
        self.orjson = _orjson

    def dumps(self, obj: Any) -> str:
        # orjson emits bytes; the interface promises str.
        return self.orjson.dumps(obj).decode("utf-8")

    def loads(self, s: Union[str, bytes, bytearray]) -> Any:
        return self.orjson.loads(s)


class StdJsonAdapter(JsonInterface):
    """JsonInterface backed by the built-in json module."""

    def __init__(self):
        self.json = _std_json

    def dumps(self, obj: Any) -> str:
        return self.json.dumps(obj)

    def loads(self, s: Union[str, bytes, bytearray]) -> Any:
        return self.json.loads(s)


# Select the backend once, at import time.
json_impl = OrJsonAdapter() if _orjson is not None else StdJsonAdapter()


def dumps(obj: Any) -> str:
    """Serialize *obj* to a JSON string via the selected backend."""
    return json_impl.dumps(obj)


def loads(s: Union[str, bytes, bytearray]) -> Any:
    """Deserialize JSON text/bytes via the selected backend."""
    return json_impl.loads(s)


# Drop-in stand-in for the `json` module within this package.
json = SimpleNamespace(
    dumps=dumps,
    loads=loads
)
class BaseSqlRow(abc.ABC):
    """Abstract interface for a single SQL row."""

    @classmethod
    @abc.abstractmethod
    def from_json(cls, json_data: str) -> 'BaseSqlRow':
        raise NotImplementedError

    @classmethod
    @abc.abstractmethod
    def from_dict(cls, dct: dict) -> 'BaseSqlRow':
        raise NotImplementedError

    @abc.abstractmethod
    def __getitem__(self, key):
        raise NotImplementedError

    @abc.abstractmethod
    def __setitem__(self, key, value):
        raise NotImplementedError

    @abc.abstractmethod
    def to_json(self) -> str:
        raise NotImplementedError


class BaseSqlRowList(abc.ABC):
    """Abstract marker interface for a list of SQL rows."""
    pass


class SqlRow(BaseSqlRow, collections.UserDict):
    """A SQL Row.

    SqlRow objects are ``UserDict`` subclasses and behave like dicts.
    """

    @classmethod
    def from_json(cls, json_data: str) -> 'BaseSqlRow':
        """Create a SqlRow from a JSON string."""
        return cls.from_dict(json.loads(json_data))

    @classmethod
    def from_dict(cls, dct: dict) -> 'BaseSqlRow':
        """Create a SqlRow from a dict object."""
        return cls(dict(dct))

    def to_json(self) -> str:
        """Return the JSON representation of the SqlRow."""
        return json.dumps(dict(self))

    def __getitem__(self, key):
        return collections.UserDict.__getitem__(self, key)

    def __setitem__(self, key, value):
        return collections.UserDict.__setitem__(self, key, value)

    def __repr__(self) -> str:
        # Fixed: previously returned an empty f-string (f''), which
        # made instances indistinguishable when debugging. Include the
        # class name and contents instead.
        return f'<SqlRow {dict(self)!r}>'


class SqlRowList(BaseSqlRowList, collections.UserList):
    "A ``UserList`` subclass containing a list of :class:`~SqlRow` objects"
    pass
class BaseMySqlRow(abc.ABC):
    """Abstract interface for a single MySQL row."""

    @classmethod
    @abc.abstractmethod
    def from_json(cls, json_data: str) -> 'BaseMySqlRow':
        raise NotImplementedError

    @classmethod
    @abc.abstractmethod
    def from_dict(cls, dct: dict) -> 'BaseMySqlRow':
        raise NotImplementedError

    @abc.abstractmethod
    def __getitem__(self, key):
        raise NotImplementedError

    @abc.abstractmethod
    def __setitem__(self, key, value):
        raise NotImplementedError

    @abc.abstractmethod
    def to_json(self) -> str:
        raise NotImplementedError


class BaseMySqlRowList(abc.ABC):
    """Abstract marker interface for a list of MySQL rows."""
    pass


class MySqlRow(BaseMySqlRow, collections.UserDict):
    """A MySql Row.

    MySqlRow objects are ``UserDict`` subclasses and behave like dicts.
    """

    @classmethod
    def from_json(cls, json_data: str) -> 'BaseMySqlRow':
        """Create a MySqlRow from a JSON string."""
        return cls.from_dict(json.loads(json_data))

    @classmethod
    def from_dict(cls, dct: dict) -> 'BaseMySqlRow':
        """Create a MySqlRow from a dict object."""
        return cls(dict(dct))

    def to_json(self) -> str:
        """Return the JSON representation of the MySqlRow."""
        return json.dumps(dict(self))

    def __getitem__(self, key):
        return collections.UserDict.__getitem__(self, key)

    def __setitem__(self, key, value):
        return collections.UserDict.__setitem__(self, key, value)

    def __repr__(self) -> str:
        # Fixed: previously returned an empty f-string (f''), which
        # made instances indistinguishable when debugging. Include the
        # class name and contents instead.
        return f'<MySqlRow {dict(self)!r}>'


class MySqlRowList(BaseMySqlRowList, collections.UserList):
    "A ``UserList`` subclass containing a list of :class:`~MySqlRow` objects"
    pass
# MCP-specific context object
class MCPToolContext(typing.Dict[str, typing.Any]):
    """Injected context object for MCP tool triggers."""


class MCPToolTriggerConverter(meta.InConverter, binding='mcpToolTrigger',
                              trigger=True):
    """Converter for MCP tool trigger payloads."""

    @classmethod
    def check_input_type_annotation(cls, pytype: type) -> bool:
        return issubclass(pytype, (str, dict, bytes, MCPToolContext))

    @classmethod
    def has_implicit_output(cls) -> bool:
        return True

    @classmethod
    def decode(cls, data: meta.Datum, *, trigger_metadata):
        """
        Decode incoming MCP tool request data.
        Returns the raw data in its native format (string, dict, bytes).
        """
        # json (already parsed), string and bytes payloads are all
        # passed through untouched.
        if data.type in ('json', 'string', 'bytes'):
            return data.value
        # Fall back to python_value for other types, when available.
        return data.python_value if hasattr(data, 'python_value') \
            else data.value

    @classmethod
    def encode(cls, obj: typing.Any, *,
               expected_type: typing.Optional[type] = None):
        """
        Encode the return value from MCP tool functions.
        MCP tools typically return string responses.
        """
        if obj is None:
            return meta.Datum(type='string', value='')
        if isinstance(obj, str):
            return meta.Datum(type='string', value=obj)
        if isinstance(obj, (bytes, bytearray)):
            return meta.Datum(type='bytes', value=bytes(obj))
        # Convert any other type to its string form.
        return meta.Datum(type='string', value=str(obj))
data_type=DataType.UNDEFINED, 41 | dummy_field="dummy") 42 | 43 | self.assertEqual(http_output.get_binding_name(), HTTP_OUTPUT) 44 | self.assertEqual(http_output.get_dict_repr(), { 45 | "type": HTTP_OUTPUT, 46 | "direction": BindingDirection.OUT, 47 | 'dummyField': 'dummy', 48 | "name": "req", 49 | "dataType": DataType.UNDEFINED, 50 | }) 51 | -------------------------------------------------------------------------------- /azure/functions/decorators/sql.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | from typing import Optional 4 | 5 | from azure.functions.decorators.constants import SQL, SQL_TRIGGER 6 | from azure.functions.decorators.core import DataType, InputBinding, \ 7 | OutputBinding, Trigger 8 | 9 | 10 | class SqlInput(InputBinding): 11 | @staticmethod 12 | def get_binding_name() -> str: 13 | return SQL 14 | 15 | def __init__(self, 16 | name: str, 17 | command_text: str, 18 | connection_string_setting: str, 19 | command_type: Optional[str] = 'Text', 20 | parameters: Optional[str] = None, 21 | data_type: Optional[DataType] = None, 22 | **kwargs): 23 | self.command_text = command_text 24 | self.connection_string_setting = connection_string_setting 25 | self.command_type = command_type 26 | self.parameters = parameters 27 | super().__init__(name=name, data_type=data_type) 28 | 29 | 30 | class SqlOutput(OutputBinding): 31 | @staticmethod 32 | def get_binding_name() -> str: 33 | return SQL 34 | 35 | def __init__(self, 36 | name: str, 37 | command_text: str, 38 | connection_string_setting: str, 39 | data_type: Optional[DataType] = None, 40 | **kwargs): 41 | self.command_text = command_text 42 | self.connection_string_setting = connection_string_setting 43 | super().__init__(name=name, data_type=data_type) 44 | 45 | 46 | class SqlTrigger(Trigger): 47 | @staticmethod 48 | def get_binding_name() -> str: 49 | return 
SQL_TRIGGER 50 | 51 | def __init__(self, 52 | name: str, 53 | table_name: str, 54 | connection_string_setting: str, 55 | leases_table_name: Optional[str] = None, 56 | data_type: Optional[DataType] = None, 57 | **kwargs): 58 | self.table_name = table_name 59 | self.connection_string_setting = connection_string_setting 60 | self.leases_table_name = leases_table_name 61 | super().__init__(name=name, data_type=data_type) 62 | -------------------------------------------------------------------------------- /azure/functions/decorators/mysql.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | from typing import Optional 4 | 5 | from azure.functions.decorators.constants import MYSQL, MYSQL_TRIGGER 6 | from azure.functions.decorators.core import DataType, InputBinding, \ 7 | OutputBinding, Trigger 8 | 9 | 10 | class MySqlInput(InputBinding): 11 | @staticmethod 12 | def get_binding_name() -> str: 13 | return MYSQL 14 | 15 | def __init__(self, 16 | name: str, 17 | command_text: str, 18 | connection_string_setting: str, 19 | command_type: Optional[str] = 'Text', 20 | parameters: Optional[str] = None, 21 | data_type: Optional[DataType] = None, 22 | **kwargs): 23 | self.command_text = command_text 24 | self.connection_string_setting = connection_string_setting 25 | self.command_type = command_type 26 | self.parameters = parameters 27 | super().__init__(name=name, data_type=data_type) 28 | 29 | 30 | class MySqlOutput(OutputBinding): 31 | @staticmethod 32 | def get_binding_name() -> str: 33 | return MYSQL 34 | 35 | def __init__(self, 36 | name: str, 37 | command_text: str, 38 | connection_string_setting: str, 39 | data_type: Optional[DataType] = None, 40 | **kwargs): 41 | self.command_text = command_text 42 | self.connection_string_setting = connection_string_setting 43 | super().__init__(name=name, data_type=data_type) 44 | 45 | 46 | class 
MySqlTrigger(Trigger): 47 | @staticmethod 48 | def get_binding_name() -> str: 49 | return MYSQL_TRIGGER 50 | 51 | def __init__(self, 52 | name: str, 53 | table_name: str, 54 | connection_string_setting: str, 55 | leases_table_name: Optional[str] = None, 56 | data_type: Optional[DataType] = None, 57 | **kwargs): 58 | self.table_name = table_name 59 | self.connection_string_setting = connection_string_setting 60 | self.leases_table_name = leases_table_name 61 | super().__init__(name=name, data_type=data_type) 62 | -------------------------------------------------------------------------------- /tests/decorators/test_table.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | import unittest 4 | from azure.functions.decorators.constants import TABLE 5 | from azure.functions.decorators.table import TableInput, TableOutput 6 | from azure.functions.decorators.core import BindingDirection, DataType 7 | 8 | 9 | class TestTable(unittest.TestCase): 10 | def test_table_input_valid_creation(self): 11 | table_input = TableInput(name="in", 12 | table_name="dummy_table_name", 13 | connection="dummy_in_conn", 14 | row_key="dummy_key", 15 | partition_key="dummy_partition_key", 16 | take=1, 17 | filter="dummy_filter", 18 | data_type=DataType.UNDEFINED) 19 | 20 | self.assertEqual(table_input.get_binding_name(), TABLE) 21 | self.assertEqual(table_input.get_dict_repr(), { 22 | "direction": BindingDirection.IN, 23 | "dataType": DataType.UNDEFINED, 24 | "type": "table", 25 | "name": "in", 26 | "tableName": "dummy_table_name", 27 | "connection": "dummy_in_conn", 28 | "rowKey": "dummy_key", 29 | "partitionKey": "dummy_partition_key", 30 | "take": 1, 31 | "filter": "dummy_filter" 32 | }) 33 | 34 | def test_table_output_valid_creation(self): 35 | table_output = TableOutput(name="out", 36 | table_name="dummy_table_name", 37 | row_key="dummy_key", 38 | 
partition_key="dummy_partition_key", 39 | connection="dummy_out_conn", 40 | data_type=DataType.UNDEFINED) 41 | 42 | self.assertEqual(table_output.get_binding_name(), TABLE) 43 | self.assertEqual(table_output.get_dict_repr(), { 44 | "direction": BindingDirection.OUT, 45 | "dataType": DataType.UNDEFINED, 46 | "type": "table", 47 | "name": "out", 48 | "tableName": "dummy_table_name", 49 | "connection": "dummy_out_conn", 50 | "rowKey": "dummy_key", 51 | "partitionKey": "dummy_partition_key" 52 | }) 53 | -------------------------------------------------------------------------------- /tests/decorators/test_eventhub.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | import unittest 4 | 5 | from azure.functions.decorators.constants import EVENT_HUB_TRIGGER, EVENT_HUB 6 | from azure.functions.decorators.core import BindingDirection, Cardinality, \ 7 | DataType 8 | from azure.functions.decorators.eventhub import EventHubTrigger, EventHubOutput 9 | 10 | 11 | class TestEventHub(unittest.TestCase): 12 | def test_event_hub_trigger_valid_creation(self): 13 | trigger = EventHubTrigger(name="req", 14 | connection="dummy_connection", 15 | event_hub_name="dummy_event_hub", 16 | cardinality=Cardinality.ONE, 17 | consumer_group="dummy_group", 18 | data_type=DataType.UNDEFINED, 19 | dummy_field="dummy") 20 | 21 | self.assertEqual(trigger.get_binding_name(), "eventHubTrigger") 22 | self.assertEqual(trigger.get_dict_repr(), 23 | {"cardinality": Cardinality.ONE, 24 | "connection": "dummy_connection", 25 | "consumerGroup": "dummy_group", 26 | "dataType": DataType.UNDEFINED, 27 | "direction": BindingDirection.IN, 28 | 'dummyField': 'dummy', 29 | "eventHubName": "dummy_event_hub", 30 | "name": "req", 31 | "type": EVENT_HUB_TRIGGER}) 32 | 33 | def test_event_hub_output_valid_creation(self): 34 | output = EventHubOutput(name="res", 35 | 
event_hub_name="dummy_event_hub", 36 | connection="dummy_connection", 37 | data_type=DataType.UNDEFINED, 38 | dummy_field="dummy") 39 | 40 | self.assertEqual(output.get_binding_name(), "eventHub") 41 | self.assertEqual(output.get_dict_repr(), 42 | {'connection': 'dummy_connection', 43 | 'dataType': DataType.UNDEFINED, 44 | 'direction': BindingDirection.OUT, 45 | 'dummyField': 'dummy', 46 | 'eventHubName': 'dummy_event_hub', 47 | 'name': 'res', 48 | 'type': EVENT_HUB}) 49 | -------------------------------------------------------------------------------- /azure/functions/sql.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import collections.abc 5 | import typing 6 | 7 | from azure.functions import _sql as sql 8 | 9 | from . import meta 10 | from ._jsonutils import json 11 | 12 | 13 | class SqlConverter(meta.InConverter, meta.OutConverter, 14 | binding='sql'): 15 | 16 | @classmethod 17 | def check_input_type_annotation(cls, pytype: type) -> bool: 18 | return issubclass(pytype, sql.BaseSqlRowList) 19 | 20 | @classmethod 21 | def check_output_type_annotation(cls, pytype: type) -> bool: 22 | return issubclass(pytype, (sql.BaseSqlRowList, sql.BaseSqlRow)) 23 | 24 | @classmethod 25 | def decode(cls, 26 | data: meta.Datum, 27 | *, 28 | trigger_metadata) -> typing.Optional[sql.SqlRowList]: 29 | if data is None or data.type is None: 30 | return None 31 | 32 | data_type = data.type 33 | 34 | if data_type in ['string', 'json']: 35 | body = data.value 36 | 37 | elif data_type == 'bytes': 38 | body = data.value.decode('utf-8') 39 | 40 | else: 41 | raise NotImplementedError( 42 | f'Unsupported payload type: {data_type}') 43 | 44 | rows = json.loads(body) 45 | if not isinstance(rows, list): 46 | rows = [rows] 47 | 48 | return sql.SqlRowList( 49 | (None if row is None else sql.SqlRow.from_dict(row)) 50 | for row in rows) 51 | 52 | 
@classmethod 53 | def encode(cls, obj: typing.Any, *, 54 | expected_type: typing.Optional[type]) -> meta.Datum: 55 | if isinstance(obj, sql.SqlRow): 56 | data = sql.SqlRowList([obj]) 57 | 58 | elif isinstance(obj, sql.SqlRowList): 59 | data = obj 60 | 61 | elif isinstance(obj, collections.abc.Iterable): 62 | data = sql.SqlRowList() 63 | 64 | for row in obj: 65 | if not isinstance(row, sql.SqlRow): 66 | raise NotImplementedError( 67 | f'Unsupported list type: {type(obj)}, \ 68 | lists must contain SqlRow objects') 69 | else: 70 | data.append(row) 71 | 72 | else: 73 | raise NotImplementedError(f'Unsupported type: {type(obj)}') 74 | 75 | return meta.Datum( 76 | type='json', 77 | value=json.dumps([dict(d) for d in data]) 78 | ) 79 | -------------------------------------------------------------------------------- /azure/functions/cosmosdb.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import collections.abc 5 | import typing 6 | 7 | from azure.functions import _cosmosdb as cdb 8 | from ._jsonutils import json 9 | 10 | from . 
import meta 11 | 12 | 13 | class CosmosDBConverter(meta.InConverter, meta.OutConverter, 14 | binding='cosmosDB'): 15 | 16 | @classmethod 17 | def check_input_type_annotation(cls, pytype: type) -> bool: 18 | return issubclass(pytype, cdb.DocumentList) 19 | 20 | @classmethod 21 | def check_output_type_annotation(cls, pytype: type) -> bool: 22 | return issubclass(pytype, (cdb.DocumentList, cdb.Document)) 23 | 24 | @classmethod 25 | def decode(cls, 26 | data: meta.Datum, 27 | *, 28 | trigger_metadata) -> typing.Optional[cdb.DocumentList]: 29 | if data is None or data.type is None: 30 | return None 31 | 32 | data_type = data.type 33 | 34 | if data_type in ['string', 'json']: 35 | body = data.value 36 | 37 | elif data_type == 'bytes': 38 | body = data.value.decode('utf-8') 39 | 40 | else: 41 | raise NotImplementedError( 42 | f'unsupported queue payload type: {data_type}') 43 | 44 | documents = json.loads(body) 45 | if not isinstance(documents, list): 46 | documents = [documents] 47 | 48 | return cdb.DocumentList( 49 | (None if doc is None else cdb.Document.from_dict(doc)) 50 | for doc in documents) 51 | 52 | @classmethod 53 | def encode(cls, obj: typing.Any, *, 54 | expected_type: typing.Optional[type]) -> meta.Datum: 55 | if isinstance(obj, cdb.Document): 56 | data = cdb.DocumentList([obj]) 57 | 58 | elif isinstance(obj, cdb.DocumentList): 59 | data = obj 60 | 61 | elif isinstance(obj, collections.abc.Iterable): 62 | data = cdb.DocumentList() 63 | 64 | for doc in obj: 65 | if not isinstance(doc, cdb.Document): 66 | raise NotImplementedError 67 | else: 68 | data.append(doc) 69 | 70 | else: 71 | raise NotImplementedError 72 | 73 | return meta.Datum( 74 | type='json', 75 | value=json.dumps([dict(d) for d in data]) 76 | ) 77 | 78 | 79 | class CosmosDBTriggerConverter(CosmosDBConverter, 80 | binding='cosmosDBTrigger', trigger=True): 81 | pass 82 | -------------------------------------------------------------------------------- /azure/functions/mysql.py: 
-------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import collections.abc 5 | import typing 6 | 7 | from azure.functions import _mysql as mysql 8 | 9 | from . import meta 10 | from ._jsonutils import json 11 | 12 | 13 | class MySqlConverter(meta.InConverter, meta.OutConverter, 14 | binding='mysql'): 15 | 16 | @classmethod 17 | def check_input_type_annotation(cls, pytype: type) -> bool: 18 | return issubclass(pytype, mysql.BaseMySqlRowList) 19 | 20 | @classmethod 21 | def check_output_type_annotation(cls, pytype: type) -> bool: 22 | return issubclass(pytype, (mysql.BaseMySqlRowList, mysql.BaseMySqlRow)) 23 | 24 | @classmethod 25 | def decode(cls, 26 | data: meta.Datum, 27 | *, 28 | trigger_metadata) -> typing.Optional[mysql.MySqlRowList]: 29 | if data is None or data.type is None: 30 | return None 31 | 32 | data_type = data.type 33 | 34 | if data_type in ['string', 'json']: 35 | body = data.value 36 | 37 | elif data_type == 'bytes': 38 | body = data.value.decode('utf-8') 39 | 40 | else: 41 | raise NotImplementedError( 42 | f'Unsupported payload type: {data_type}') 43 | 44 | rows = json.loads(body) 45 | if not isinstance(rows, list): 46 | rows = [rows] 47 | 48 | return mysql.MySqlRowList( 49 | (None if row is None else mysql.MySqlRow.from_dict(row)) 50 | for row in rows) 51 | 52 | @classmethod 53 | def encode(cls, obj: typing.Any, *, 54 | expected_type: typing.Optional[type]) -> meta.Datum: 55 | if isinstance(obj, mysql.MySqlRow): 56 | data = mysql.MySqlRowList([obj]) 57 | 58 | elif isinstance(obj, mysql.MySqlRowList): 59 | data = obj 60 | 61 | elif isinstance(obj, collections.abc.Iterable): 62 | data = mysql.MySqlRowList() 63 | 64 | for row in obj: 65 | if not isinstance(row, mysql.MySqlRow): 66 | raise NotImplementedError( 67 | f'Unsupported list type: {type(obj)}, \ 68 | lists must contain MySqlRow objects') 69 | else: 70 | 
data.append(row) 71 | 72 | else: 73 | raise NotImplementedError(f'Unsupported type: {type(obj)}') 74 | 75 | return meta.Datum( 76 | type='json', 77 | value=json.dumps([dict(d) for d in data]) 78 | ) 79 | 80 | 81 | class MySqlTriggerConverter(MySqlConverter, 82 | binding='mysqlTrigger', trigger=True): 83 | pass 84 | -------------------------------------------------------------------------------- /tests/test_timer.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import json 5 | import unittest 6 | 7 | import azure.functions.timer as timer 8 | from azure.functions.meta import Datum 9 | 10 | 11 | class TestTimer(unittest.TestCase): 12 | def test_timer_decode(self): 13 | # given 14 | data = '''{"Schedule":{"AdjustForDST":true}, 15 | "ScheduleStatus":{ 16 | "Last":"2022-03-28T15:40:00.0105419-05:00", 17 | "Next":"2022-03-28T15:45:00-05:00", 18 | "LastUpdated":"2022-03-28T15:40:00.0105419-05:00"}, 19 | "IsPastDue":false}''' 20 | datum: Datum = Datum(value=data, type='json') 21 | data_dict = json.loads(data) 22 | 23 | # when 24 | timer_request: timer.TimerRequest = \ 25 | timer.TimerRequestConverter.decode(datum, trigger_metadata={}) 26 | 27 | # then 28 | self.assertEqual(timer_request.schedule, data_dict["Schedule"]) 29 | self.assertEqual(timer_request.schedule_status, 30 | data_dict["ScheduleStatus"]) 31 | self.assertEqual(timer_request.past_due, data_dict["IsPastDue"]) 32 | 33 | def test_timer_initialize_without_args(self): 34 | # given 35 | past_due = False 36 | schedule_status = {} 37 | schedule = {} 38 | 39 | # when 40 | test_timer = timer.TimerRequest() 41 | 42 | # then 43 | self.assertEqual(past_due, test_timer.past_due) 44 | self.assertEqual(schedule_status, test_timer.schedule_status) 45 | self.assertEqual(schedule, test_timer.schedule) 46 | 47 | def test_timer_initialize_empty_dicts(self): 48 | # given 49 | past_due = 
False 50 | 51 | # when 52 | test_timer = timer.TimerRequest() 53 | 54 | # then 55 | self.assertEqual(past_due, test_timer.past_due) 56 | self.assertEqual({}, test_timer.schedule_status) 57 | self.assertEqual({}, test_timer.schedule) 58 | 59 | def test_timer_no_implementation_exception(self): 60 | # given 61 | datum: Datum = Datum(value="test", type='string') 62 | is_exception_raised = False 63 | 64 | # when 65 | try: 66 | timer.TimerRequestConverter.decode(datum, trigger_metadata={}) 67 | except NotImplementedError: 68 | is_exception_raised = True 69 | 70 | # then 71 | self.assertTrue(is_exception_raised) 72 | 73 | def test_timer_input_type(self): 74 | check_input_type = ( 75 | timer.TimerRequestConverter.check_input_type_annotation 76 | ) 77 | self.assertTrue(check_input_type(timer.TimerRequest)) 78 | self.assertFalse(check_input_type(str)) 79 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Security 4 | 5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 6 | 7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. 
8 | 9 | ## Reporting Security Issues 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues.** 12 | 13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). 14 | 15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). 16 | 17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). 18 | 19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 20 | 21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 22 | * Full paths of source file(s) related to the manifestation of the issue 23 | * The location of the affected source code (tag/branch/commit or direct URL) 24 | * Any special configuration required to reproduce the issue 25 | * Step-by-step instructions to reproduce the issue 26 | * Proof-of-concept or exploit code (if possible) 27 | * Impact of the issue, including how an attacker might exploit the issue 28 | 29 | This information will help us triage your report more quickly. 30 | 31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. 32 | 33 | ## Preferred Languages 34 | 35 | We prefer all communications to be in English. 
36 | 37 | ## Policy 38 | 39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). 40 | 41 | 42 | -------------------------------------------------------------------------------- /tests/decorators/test_sql.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | import unittest 4 | 5 | from azure.functions.decorators.constants import SQL_TRIGGER, SQL 6 | from azure.functions.decorators.core import BindingDirection, DataType 7 | from azure.functions.decorators.sql import SqlTrigger, \ 8 | SqlInput, SqlOutput 9 | 10 | 11 | class TestSql(unittest.TestCase): 12 | def test_sql_trigger_valid_creation(self): 13 | trigger = SqlTrigger(name="req", 14 | table_name="dummy_table", 15 | connection_string_setting="dummy_setting", 16 | data_type=DataType.UNDEFINED, 17 | dummy_field="dummy") 18 | 19 | self.assertEqual(trigger.get_binding_name(), "sqlTrigger") 20 | self.assertEqual(trigger.get_dict_repr(), 21 | {"connectionStringSetting": "dummy_setting", 22 | "dataType": DataType.UNDEFINED, 23 | "tableName": "dummy_table", 24 | "direction": BindingDirection.IN, 25 | "dummyField": "dummy", 26 | "name": "req", 27 | "type": SQL_TRIGGER}) 28 | 29 | def test_sql_output_valid_creation(self): 30 | output = SqlOutput(name="req", 31 | command_text="dummy_table", 32 | connection_string_setting="dummy_setting", 33 | data_type=DataType.UNDEFINED, 34 | dummy_field="dummy") 35 | self.assertEqual(output.get_binding_name(), "sql") 36 | self.assertEqual(output.get_dict_repr(), 37 | {"commandText": "dummy_table", 38 | "connectionStringSetting": "dummy_setting", 39 | "dataType": DataType.UNDEFINED, 40 | "direction": BindingDirection.OUT, 41 | "dummyField": "dummy", 42 | "name": "req", 43 | "type": SQL}) 44 | 45 | def test_sql_input_valid_creation(self): 46 | input = SqlInput(name="req", 47 | 
command_text="dummy_query", 48 | connection_string_setting="dummy_setting", 49 | data_type=DataType.UNDEFINED, 50 | dummy_field="dummy") 51 | self.assertEqual(input.get_binding_name(), "sql") 52 | self.assertEqual(input.get_dict_repr(), 53 | {"commandText": "dummy_query", 54 | "connectionStringSetting": "dummy_setting", 55 | "commandType": "Text", 56 | "dataType": DataType.UNDEFINED, 57 | "direction": BindingDirection.IN, 58 | "dummyField": "dummy", 59 | "name": "req", 60 | "type": SQL}) 61 | -------------------------------------------------------------------------------- /azure/functions/_queue.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import datetime 5 | import typing 6 | 7 | from . import _abc 8 | from ._jsonutils import json 9 | 10 | 11 | class QueueMessage(_abc.QueueMessage): 12 | """A Queue message object. 13 | 14 | :param str id: 15 | An optional string specifying the ID of the message. 16 | 17 | :param body: 18 | A string or bytes instance specifying the message body. 19 | 20 | :param str pop_receipt: 21 | An optional string containing the pop receipt token. 
22 | """ 23 | 24 | def __init__(self, *, 25 | id: typing.Optional[str] = None, 26 | body: typing.Optional[typing.Union[str, bytes]] = None, 27 | pop_receipt: typing.Optional[str] = None) -> None: 28 | self.__id = id 29 | self.__body = b'' 30 | self.__pop_receipt = pop_receipt 31 | 32 | if body is not None: 33 | self.__set_body(body) 34 | 35 | @property 36 | def id(self) -> typing.Optional[str]: 37 | """Message ID.""" 38 | return self.__id 39 | 40 | @property 41 | def dequeue_count(self) -> typing.Optional[int]: 42 | """The number of times this message has been dequeued.""" 43 | return None 44 | 45 | @property 46 | def expiration_time(self) -> typing.Optional[datetime.datetime]: 47 | """A datetime object with the message expiry time.""" 48 | return None 49 | 50 | @property 51 | def insertion_time(self) -> typing.Optional[datetime.datetime]: 52 | """A datetime object with the message queue insertion time.""" 53 | return None 54 | 55 | @property 56 | def time_next_visible(self) -> typing.Optional[datetime.datetime]: 57 | """A datetime object with the time the message will be visible next.""" 58 | return None 59 | 60 | @property 61 | def pop_receipt(self) -> typing.Optional[str]: 62 | """The message pop receipt token as a string.""" 63 | return self.__pop_receipt 64 | 65 | def __set_body(self, body): 66 | if isinstance(body, str): 67 | body = body.encode('utf-8') 68 | 69 | if not isinstance(body, (bytes, bytearray)): 70 | raise TypeError( 71 | f'response is expected to be either of ' 72 | f'str, bytes, or bytearray, got {type(body).__name__}') 73 | 74 | self.__body = bytes(body) 75 | 76 | def get_body(self) -> bytes: 77 | """Return message content as bytes.""" 78 | return self.__body 79 | 80 | def get_json(self) -> typing.Any: 81 | """Decode and return message content as a JSON object. 82 | 83 | :return: 84 | Decoded JSON data. 85 | 86 | :raises ValueError: 87 | when the body of the message does not contain valid JSON data. 
88 | """ 89 | return json.loads(self.__body) 90 | 91 | def __repr__(self) -> str: 92 | return ( 93 | f'' 94 | ) 95 | -------------------------------------------------------------------------------- /tests/decorators/test_mysql.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | import unittest 4 | 5 | from azure.functions.decorators.constants import MYSQL, MYSQL_TRIGGER 6 | from azure.functions.decorators.core import BindingDirection, DataType 7 | from azure.functions.decorators.mysql import MySqlInput, \ 8 | MySqlOutput, MySqlTrigger 9 | 10 | 11 | class TestMySql(unittest.TestCase): 12 | def test_mysql_input_valid_creation(self): 13 | input = MySqlInput(name="req", 14 | command_text="dummy_query", 15 | connection_string_setting="dummy_setting", 16 | data_type=DataType.UNDEFINED, 17 | dummy_field="dummy") 18 | self.assertEqual(input.get_binding_name(), "mysql") 19 | self.assertEqual(input.get_dict_repr(), 20 | {"commandText": "dummy_query", 21 | "connectionStringSetting": "dummy_setting", 22 | "commandType": "Text", 23 | "dataType": DataType.UNDEFINED, 24 | "direction": BindingDirection.IN, 25 | "dummyField": "dummy", 26 | "name": "req", 27 | "type": MYSQL}) 28 | 29 | def test_mysql_output_valid_creation(self): 30 | output = MySqlOutput(name="req", 31 | command_text="dummy_table", 32 | connection_string_setting="dummy_setting", 33 | data_type=DataType.UNDEFINED, 34 | dummy_field="dummy") 35 | self.assertEqual(output.get_binding_name(), "mysql") 36 | self.assertEqual(output.get_dict_repr(), 37 | {"commandText": "dummy_table", 38 | "connectionStringSetting": "dummy_setting", 39 | "dataType": DataType.UNDEFINED, 40 | "direction": BindingDirection.OUT, 41 | "dummyField": "dummy", 42 | "name": "req", 43 | "type": MYSQL}) 44 | 45 | def test_mysql_trigger_valid_creation(self): 46 | trigger = MySqlTrigger(name="req", 47 | 
table_name="dummy_table", 48 | connection_string_setting="dummy_setting", 49 | data_type=DataType.UNDEFINED, 50 | dummy_field="dummy") 51 | 52 | self.assertEqual(trigger.get_binding_name(), "mysqlTrigger") 53 | self.assertEqual(trigger.get_dict_repr(), 54 | {"connectionStringSetting": "dummy_setting", 55 | "dataType": DataType.UNDEFINED, 56 | "tableName": "dummy_table", 57 | "direction": BindingDirection.IN, 58 | "dummyField": "dummy", 59 | "name": "req", 60 | "type": MYSQL_TRIGGER}) 61 | -------------------------------------------------------------------------------- /tests/decorators/test_generic.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | import unittest 4 | 5 | from azure.functions.decorators.constants import HTTP_TRIGGER, COSMOS_DB, BLOB 6 | from azure.functions.decorators.core import BindingDirection, AuthLevel, \ 7 | DataType 8 | from azure.functions.decorators.generic import GenericInputBinding, \ 9 | GenericTrigger, GenericOutputBinding 10 | 11 | 12 | class TestGeneric(unittest.TestCase): 13 | def test_generic_trigger_valid_creation(self): 14 | trigger = GenericTrigger(name="req", 15 | type=HTTP_TRIGGER, 16 | data_type=DataType.UNDEFINED, 17 | auth_level=AuthLevel.ANONYMOUS, 18 | methods=["GET", "POST"], 19 | route="dummy") 20 | 21 | self.assertEqual(trigger.get_binding_name(), None) 22 | self.assertEqual(trigger.type, HTTP_TRIGGER) 23 | self.assertEqual(trigger.get_dict_repr(), { 24 | "authLevel": AuthLevel.ANONYMOUS, 25 | "type": HTTP_TRIGGER, 26 | "direction": BindingDirection.IN, 27 | "name": 'req', 28 | "dataType": DataType.UNDEFINED, 29 | "route": 'dummy', 30 | "methods": ["GET", "POST"] 31 | }) 32 | 33 | def test_generic_input_valid_creation(self): 34 | cosmosdb_input = GenericInputBinding( 35 | name="inDocs", 36 | type=COSMOS_DB, 37 | database_name="dummy_db", 38 | collection_name="dummy_collection", 39 
| connection_string_setting="dummy_str", 40 | id='dummy_id', 41 | partitionKey='dummy_partitions', 42 | sqlQuery='dummy_query') 43 | self.assertEqual(cosmosdb_input.get_binding_name(), None) 44 | self.assertEqual(cosmosdb_input.get_dict_repr(), 45 | {'collectionName': 'dummy_collection', 46 | 'connectionStringSetting': 'dummy_str', 47 | 'databaseName': 'dummy_db', 48 | 'direction': BindingDirection.IN, 49 | 'id': 'dummy_id', 50 | 'name': 'inDocs', 51 | 'partitionKey': 'dummy_partitions', 52 | 'sqlQuery': 'dummy_query', 53 | 'type': COSMOS_DB}) 54 | 55 | def test_generic_output_valid_creation(self): 56 | blob_output = GenericOutputBinding(name="res", type=BLOB, 57 | data_type=DataType.UNDEFINED, 58 | path="dummy_path", 59 | connection="dummy_connection") 60 | 61 | self.assertEqual(blob_output.get_binding_name(), None) 62 | self.assertEqual(blob_output.get_dict_repr(), { 63 | "type": BLOB, 64 | "direction": BindingDirection.OUT, 65 | "name": "res", 66 | "dataType": DataType.UNDEFINED, 67 | "path": "dummy_path", 68 | "connection": "dummy_connection" 69 | }) 70 | -------------------------------------------------------------------------------- /tests/test_code_quality.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
class TestCodeQuality(unittest.TestCase):
    """Lint/type-check gates plus a PEP 440 version sanity check."""

    @staticmethod
    def _run_tool(module_args, failure_label):
        # Run `python -m <tool> ...` from the repo root; surface the tool's
        # captured output as an AssertionError on non-zero exit.
        try:
            subprocess.run(
                [sys.executable, '-m', *module_args],
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=str(ROOT_PATH))
        except subprocess.CalledProcessError as ex:
            raise AssertionError(
                f'{failure_label} validation failed:\n'
                f'{ex.output.decode()}') from None

    @staticmethod
    def _flake8_config():
        # Skip (rather than fail) when flake8 or its config is unavailable.
        try:
            import flake8  # NoQA
        except ImportError:
            raise unittest.SkipTest('flake8 module is missing')

        config_path = ROOT_PATH / '.flake8'
        if not config_path.exists():
            raise unittest.SkipTest('could not locate the .flake8 file')
        return config_path

    def test_mypy(self):
        try:
            import mypy  # NoQA
        except ImportError:
            raise unittest.SkipTest('mypy module is missing')

        self._run_tool(['mypy', '-p', 'azure.functions'], 'mypy')

    def test_flake8(self):
        config_path = self._flake8_config()
        # Docstring rules (D*) are checked separately in test_pydocs.
        self._run_tool(['flake8', '--config', str(config_path),
                        "--extend-ignore=D"], 'flake8')

    def test_pydocs(self):
        config_path = self._flake8_config()
        self._run_tool(['flake8', '--config', str(config_path),
                        "azure/functions/decorators/function_app.py",
                        "--select=D"], 'flake8')

    def test_library_version(self):
        # PEP 440: canonical public version identifier, matched with the
        # regular expression from the specification's appendix.
        pattern = (
            r'^([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))'
            r'*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))'
            r'?(\.dev(0|[1-9][0-9]*))?$'
        )
        is_valid = re.match(pattern, func.__version__) is not None
        self.assertTrue(is_valid, '__version__ field must be canonical')
Datum(type='json', value={'foo': 'bar'}) 31 | result = MCPToolTriggerConverter.decode(data, trigger_metadata={}) 32 | self.assertEqual(result, {'foo': 'bar'}) 33 | 34 | def test_decode_string(self): 35 | data = Datum(type='string', value='hello') 36 | result = MCPToolTriggerConverter.decode(data, trigger_metadata={}) 37 | self.assertEqual(result, 'hello') 38 | 39 | def test_decode_bytes(self): 40 | data = Datum(type='bytes', value=b'data') 41 | result = MCPToolTriggerConverter.decode(data, trigger_metadata={}) 42 | self.assertEqual(result, b'data') 43 | 44 | def test_decode_other_without_python_value(self): 45 | data = Datum(type='other', value='fallback') 46 | result = MCPToolTriggerConverter.decode(data, trigger_metadata={}) 47 | self.assertEqual(result, 'fallback') 48 | 49 | def test_encode_none(self): 50 | result = MCPToolTriggerConverter.encode(None) 51 | self.assertEqual(result.type, 'string') 52 | self.assertEqual(result.value, '') 53 | 54 | def test_encode_string(self): 55 | result = MCPToolTriggerConverter.encode('hello') 56 | self.assertEqual(result.type, 'string') 57 | self.assertEqual(result.value, 'hello') 58 | 59 | def test_encode_bytes(self): 60 | result = MCPToolTriggerConverter.encode(b'\x00\x01') 61 | self.assertEqual(result.type, 'bytes') 62 | self.assertEqual(result.value, b'\x00\x01') 63 | 64 | def test_encode_bytearray(self): 65 | result = MCPToolTriggerConverter.encode(bytearray(b'\x01\x02')) 66 | self.assertEqual(result.type, 'bytes') 67 | self.assertEqual(result.value, b'\x01\x02') 68 | 69 | def test_encode_other_type(self): 70 | result = MCPToolTriggerConverter.encode(42) 71 | self.assertEqual(result.type, 'string') 72 | self.assertEqual(result.value, '42') 73 | 74 | result = MCPToolTriggerConverter.encode({'a': 1}) 75 | self.assertEqual(result.type, 'string') 76 | self.assertIn("'a'", result.value) 77 | -------------------------------------------------------------------------------- /azure/functions/_eventgrid.py: 
class EventGridEvent(azf_abc.EventGridEvent):
    """An EventGrid event message.

    Immutable rich type handed to eventGridTrigger functions; all fields
    are captured at construction time and exposed read-only.
    """

    def __init__(self, *,
                 id: str,
                 data: typing.Dict[str, object],
                 topic: str,
                 subject: str,
                 event_type: str,
                 event_time: typing.Optional[datetime.datetime],
                 data_version: str) -> None:
        self.__id = id
        self.__data = data
        self.__subject = subject
        self.__topic = topic
        self.__event_type = event_type
        self.__event_time = event_time
        self.__data_version = data_version

    @property
    def id(self) -> str:
        # Event identifier supplied by the publisher.
        return self.__id

    def get_json(self) -> typing.Any:
        """Return the event's 'data' payload as stored (already parsed)."""
        return self.__data

    @property
    def topic(self) -> str:
        return self.__topic

    @property
    def subject(self) -> str:
        return self.__subject

    @property
    def event_type(self) -> str:
        return self.__event_type

    @property
    def event_time(self) -> typing.Optional[datetime.datetime]:
        # None when the source payload carried no parseable timestamp.
        return self.__event_time

    @property
    def data_version(self) -> str:
        return self.__data_version

    def __repr__(self) -> str:
        # NOTE(review): the original f-string fragments were corrupted in
        # this copy of the file (content between quotes lost); reconstructed
        # to the conventional '<azure.X ... at 0x...>' shape used by the
        # sibling rich types -- confirm exact text against upstream.
        return (
            f'<azure.EventGridEvent '
            f'id={self.id} '
            f'topic={self.topic} '
            f'subject={self.subject} '
            f'at 0x{id(self):0x}>'
        )
class EventHubEvent(func_abc.EventHubEvent):
    """A concrete implementation of Event Hub message type."""

    def __init__(self, *,
                 body: bytes,
                 trigger_metadata: typing.Optional[
                     typing.Mapping[str, meta.Datum]] = None,
                 enqueued_time: typing.Optional[datetime.datetime] = None,
                 partition_key: typing.Optional[str] = None,
                 sequence_number: typing.Optional[int] = None,
                 offset: typing.Optional[str] = None,
                 iothub_metadata: typing.Optional[
                     typing.Mapping[str, str]] = None) -> None:
        self.__body = body
        self.__trigger_metadata = trigger_metadata
        self.__enqueued_time = enqueued_time
        self.__partition_key = partition_key
        self.__sequence_number = sequence_number
        self.__offset = offset
        self.__iothub_metadata = iothub_metadata

        # Cache for trigger metadata after Python object conversion
        self._trigger_metadata_pyobj: typing.Optional[
            typing.Mapping[str, typing.Any]] = None

    def get_body(self) -> bytes:
        """Return the raw event body bytes."""
        return self.__body

    @property
    def partition_key(self) -> typing.Optional[str]:
        return self.__partition_key

    @property
    def iothub_metadata(self) -> typing.Optional[typing.Mapping[str, str]]:
        return self.__iothub_metadata

    @property
    def sequence_number(self) -> typing.Optional[int]:
        return self.__sequence_number

    @property
    def enqueued_time(self) -> typing.Optional[datetime.datetime]:
        return self.__enqueued_time

    @property
    def offset(self) -> typing.Optional[str]:
        return self.__offset

    @property
    def metadata(self) -> typing.Optional[typing.Mapping[str, typing.Any]]:
        """Getting read-only trigger metadata in a Python dictionary.

        Exposing the raw trigger_metadata to our customer. For cardinality=many
        scenarios, each event points to the common metadata of all the events.

        So when using metadata field when cardinality=many, it only needs to
        take one of the events to get all the data (e.g. events[0].metadata).

        Returns:
        --------
        typing.Mapping[str, object]
            Return the Python dictionary of trigger metadata
        """
        if self.__trigger_metadata is None:
            return None

        # Convert Datum values to Python objects once and memoize; later
        # accesses reuse the cached mapping.
        if self._trigger_metadata_pyobj is None:
            self._trigger_metadata_pyobj = {
                k: v.python_value for (k, v) in self.__trigger_metadata.items()
            }
        return self._trigger_metadata_pyobj

    def __repr__(self) -> str:
        # NOTE(review): the original f-string fragments were corrupted in
        # this copy of the file (content between quotes lost); reconstructed
        # to the conventional repr shape used by the sibling rich types --
        # confirm exact text against upstream.
        return (
            f'<azure.EventHubEvent '
            f'partition_key={self.partition_key} '
            f'sequence_number={self.sequence_number} '
            f'enqueued_time={self.enqueued_time} '
            f'at 0x{id(self):0x}>'
        )
def try_parse_datetime_with_formats(
    datetime_str: str,
    datetime_formats: List[str]
) -> Tuple[Optional[datetime], Optional[str], Optional[Exception]]:
    """Try parsing the datetime string with a list of formats.

    Parameters
    ----------
    datetime_str: str
        The datetime string to be parsed (e.g. 2018-12-12T03:16:34.2191Z)
    datetime_formats: List[str]
        A list of datetime formats that the parser tries to match, in order

    Returns
    -------
    Tuple[Optional[datetime], Optional[str], Optional[Exception]]:
        If the datetime can be successfully parsed, the first element is the
        parsed datetime object and the second is the matched format.
        If the datetime cannot be parsed, the first and second elements are
        None, and the third is the last ValueError raised by
        datetime.strptime() (or None when the format list is empty).
    """
    # Pre-initialize so an empty format list returns (None, None, None)
    # instead of raising UnboundLocalError.
    last_exception: Optional[Exception] = None
    for fmt in datetime_formats:
        try:
            dt = datetime.strptime(datetime_str, fmt)
            return (dt, fmt, None)
        except ValueError as ve:
            last_exception = ve

    return (None, None, last_exception)


def try_parse_timedelta_with_formats(
    timedelta_str: str,
    timedelta_formats: List[str]
) -> Tuple[Optional[timedelta], Optional[str], Optional[Exception]]:
    """Try parsing the time delta string with a list of formats.

    Parameters
    ----------
    timedelta_str: str
        The timedelta string to be parsed (e.g. 12:34:56)
    timedelta_formats: List[str]
        A list of formats that the parser tries to match, in order.
        The singular formats '%S', '%M' and '%H' treat the whole string
        as an integer count of seconds/minutes/hours.

    Returns
    -------
    Tuple[Optional[timedelta], Optional[str], Optional[Exception]]:
        If the timedelta can be successfully parsed, the first element is the
        parsed timedelta object and the second is the matched format.
        If the timedelta cannot be parsed, the first and second elements are
        None, and the third is the last ValueError raised during parsing
        (or None when the format list is empty).
    """
    # Pre-initialize so an empty format list returns (None, None, None)
    # instead of raising UnboundLocalError.
    last_exception: Optional[Exception] = None
    for fmt in timedelta_formats:
        try:
            # If singular form %S, %M, %H, will just return the timedelta
            if fmt == '%S':
                td = timedelta(seconds=int(timedelta_str))
            elif fmt == '%M':
                td = timedelta(minutes=int(timedelta_str))
            elif fmt == '%H':
                td = timedelta(hours=int(timedelta_str))
            else:
                # Parse as a clock time, then convert the components.
                dt = datetime.strptime(timedelta_str, fmt)
                td = timedelta(hours=dt.hour,
                               minutes=dt.minute,
                               seconds=dt.second)
            return (td, fmt, None)
        except ValueError as ve:
            last_exception = ve

    return (None, None, last_exception)
3 | 4 | from ._abc import TimerRequest, InputStream, Context, Out 5 | from ._eventhub import EventHubEvent 6 | from ._eventgrid import EventGridEvent, EventGridOutputEvent 7 | from ._cosmosdb import Document, DocumentList 8 | from ._http import HttpRequest, HttpResponse 9 | from .decorators import (FunctionApp, Function, Blueprint, 10 | DecoratorApi, DataType, AuthLevel, 11 | Cardinality, AccessRights, HttpMethod, 12 | AsgiFunctionApp, WsgiFunctionApp, 13 | ExternalHttpFunctionApp, BlobSource, McpPropertyType) 14 | from ._durable_functions import OrchestrationContext, EntityContext 15 | from .decorators.function_app import (FunctionRegister, TriggerApi, 16 | BindingApi, SettingsApi) 17 | from .extension import (ExtensionMeta, FunctionExtensionException, 18 | FuncExtensionBase, AppExtensionBase) 19 | from ._http_wsgi import WsgiMiddleware 20 | from ._http_asgi import AsgiMiddleware 21 | from .kafka import KafkaEvent, KafkaConverter, KafkaTriggerConverter 22 | from .mcp import MCPToolContext 23 | from .meta import get_binding_registry 24 | from ._queue import QueueMessage 25 | from ._servicebus import ServiceBusMessage 26 | from ._sql import SqlRow, SqlRowList 27 | from ._mysql import MySqlRow, MySqlRowList 28 | 29 | # Import binding implementations to register them 30 | from . import blob # NoQA 31 | from . import cosmosdb # NoQA 32 | from . import eventgrid # NoQA 33 | from . import eventhub # NoQA 34 | from . import http # NoQA 35 | from . import kafka # NoQA 36 | from . import mcp # NoQA 37 | from . import queue # NoQA 38 | from . import servicebus # NoQA 39 | from . import timer # NoQA 40 | from . import durable_functions # NoQA 41 | from . import sql # NoQA 42 | from . import warmup # NoQA 43 | from . import mysql # NoQA 44 | 45 | 46 | __all__ = ( 47 | # Functions 48 | 'get_binding_registry', 49 | 50 | # Generics. 51 | 'Context', 52 | 'Out', 53 | 54 | # Binding rich types, sorted alphabetically. 
55 | 'Document', 56 | 'DocumentList', 57 | 'EventGridEvent', 58 | 'EventGridOutputEvent', 59 | 'EventHubEvent', 60 | 'HttpRequest', 61 | 'HttpResponse', 62 | 'InputStream', 63 | 'KafkaEvent', 64 | 'KafkaConverter', 65 | 'KafkaTriggerConverter', 66 | 'OrchestrationContext', 67 | 'EntityContext', 68 | 'QueueMessage', 69 | 'ServiceBusMessage', 70 | 'SqlRow', 71 | 'SqlRowList', 72 | 'TimerRequest', 73 | 'WarmUpContext', 74 | 'MySqlRow', 75 | 'MySqlRowList', 76 | 77 | # Middlewares 78 | 'WsgiMiddleware', 79 | 'AsgiMiddleware', 80 | 81 | # Extensions 82 | 'AppExtensionBase', 83 | 'FuncExtensionBase', 84 | 'ExtensionMeta', 85 | 'FunctionExtensionException', 86 | 87 | # PyStein implementation 88 | 'FunctionApp', 89 | 'Function', 90 | 'FunctionRegister', 91 | 'DecoratorApi', 92 | 'TriggerApi', 93 | 'BindingApi', 94 | 'SettingsApi', 95 | 'Blueprint', 96 | 'ExternalHttpFunctionApp', 97 | 'AsgiFunctionApp', 98 | 'WsgiFunctionApp', 99 | 'DataType', 100 | 'AuthLevel', 101 | 'Cardinality', 102 | 'AccessRights', 103 | 'HttpMethod', 104 | 'BlobSource', 105 | 'MCPToolContext', 106 | 'McpPropertyType' 107 | ) 108 | 109 | __version__ = '1.25.0b2' 110 | -------------------------------------------------------------------------------- /azure/functions/decorators/servicebus.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
class ServiceBusQueueTrigger(Trigger):
    """Trigger binding for messages arriving on a Service Bus queue."""

    @staticmethod
    def get_binding_name() -> str:
        return SERVICE_BUS_TRIGGER

    def __init__(self,
                 name: str,
                 connection: str,
                 queue_name: str,
                 data_type: Optional[DataType] = None,
                 access_rights: Optional[AccessRights] = None,
                 is_sessions_enabled: Optional[bool] = None,
                 cardinality: Optional[Cardinality] = None,
                 **kwargs):
        # Queue-specific settings are recorded before the base init; their
        # assignment order feeds the attribute order used by get_dict_repr.
        self.connection = connection
        self.queue_name = queue_name
        self.access_rights = access_rights
        self.is_sessions_enabled = is_sessions_enabled
        self.cardinality = cardinality
        super().__init__(name=name, data_type=data_type)


class ServiceBusQueueOutput(OutputBinding):
    """Output binding that sends messages to a Service Bus queue."""

    @staticmethod
    def get_binding_name() -> str:
        return SERVICE_BUS

    def __init__(self,
                 name: str,
                 connection: str,
                 queue_name: str,
                 data_type: Optional[DataType] = None,
                 access_rights: Optional[AccessRights] = None,
                 **kwargs):
        self.connection = connection
        self.queue_name = queue_name
        self.access_rights = access_rights
        super().__init__(name=name, data_type=data_type)


class ServiceBusTopicTrigger(Trigger):
    """Trigger binding for messages on a Service Bus topic subscription."""

    @staticmethod
    def get_binding_name() -> str:
        # Topic and queue triggers share the same host binding type.
        return SERVICE_BUS_TRIGGER

    def __init__(self,
                 name: str,
                 connection: str,
                 topic_name: str,
                 subscription_name: str,
                 data_type: Optional[DataType] = None,
                 access_rights: Optional[AccessRights] = None,
                 is_sessions_enabled: Optional[bool] = None,
                 cardinality: Optional[Cardinality] = None,
                 **kwargs):
        self.connection = connection
        self.topic_name = topic_name
        self.subscription_name = subscription_name
        self.access_rights = access_rights
        self.is_sessions_enabled = is_sessions_enabled
        self.cardinality = cardinality
        super().__init__(name=name, data_type=data_type)


class ServiceBusTopicOutput(OutputBinding):
    """Output binding that publishes messages to a Service Bus topic."""

    @staticmethod
    def get_binding_name() -> str:
        return SERVICE_BUS

    def __init__(self,
                 name: str,
                 connection: str,
                 topic_name: str,
                 subscription_name: Optional[str] = None,
                 data_type: Optional[DataType] = None,
                 access_rights: Optional[AccessRights] = None,
                 **kwargs):
        self.connection = connection
        self.topic_name = topic_name
        # Optional here (unlike the topic trigger) -- output publishes to
        # the topic itself, not a specific subscription.
        self.subscription_name = subscription_name
        self.access_rights = access_rights
        super().__init__(name=name, data_type=data_type)
class TestEventGrid(unittest.TestCase):
    """Tests for the Event Grid trigger and output decorator bindings."""

    # Both invalid-creation paths raise with the same final message text.
    _EXCLUSIVITY_MSG = ("Specify either the 'Connection' property or both "
                        "'TopicKeySetting' and 'TopicEndpointUri' properties,"
                        " but not both.")

    def test_event_grid_trigger_valid_creation(self):
        eg_trigger = EventGridTrigger(name="req",
                                      data_type=DataType.UNDEFINED,
                                      dummy_field="dummy")

        self.assertEqual(eg_trigger.get_binding_name(), "eventGridTrigger")
        expected_repr = {'name': 'req',
                         "dataType": DataType.UNDEFINED,
                         "direction": BindingDirection.IN,
                         'dummyField': 'dummy',
                         "type": EVENT_GRID_TRIGGER}
        self.assertEqual(eg_trigger.get_dict_repr(), expected_repr)

    def test_event_grid_output_valid_creation(self):
        eg_output = EventGridOutput(
            name="res",
            topic_endpoint_uri="dummy_topic_endpoint_uri",
            topic_key_setting="dummy_topic_key_setting",
            data_type=DataType.UNDEFINED,
            dummy_field="dummy")

        self.assertEqual(eg_output.get_binding_name(), "eventGrid")
        expected_repr = {'dataType': DataType.UNDEFINED,
                         'direction': BindingDirection.OUT,
                         'dummyField': 'dummy',
                         'topicEndpointUri': 'dummy_topic_endpoint_uri',
                         'topicKeySetting': 'dummy_topic_key_setting',
                         'name': 'res',
                         'type': EVENT_GRID}
        self.assertEqual(eg_output.get_dict_repr(), expected_repr)

    def test_event_grid_output_valid_creation_with_connection(self):
        eg_output = EventGridOutput(name="res",
                                    connection="dummy_connection",
                                    data_type=DataType.UNDEFINED,
                                    dummy_field="dummy")

        self.assertEqual(eg_output.connection, "dummy_connection")
        self.assertIsNone(eg_output.topic_endpoint_uri)
        self.assertIsNone(eg_output.topic_key_setting)

    def test_event_grid_output_invalid_creation_with_both(self):
        # Connection and topic settings are mutually exclusive.
        with self.assertRaises(ValueError) as context:
            EventGridOutput(name="res",
                            connection="dummy_connection",
                            topic_endpoint_uri="dummy_topic_endpoint_uri",
                            topic_key_setting="dummy_topic_key_setting")

        self.assertIn(self._EXCLUSIVITY_MSG, str(context.exception))

    def test_event_grid_output_invalid_creation_with_none(self):
        # At least one of the two configuration styles must be supplied.
        with self.assertRaises(ValueError) as context:
            EventGridOutput(name="res",
                            data_type=DataType.UNDEFINED,
                            dummy_field="dummy")

        self.assertIn(self._EXCLUSIVITY_MSG, str(context.exception))
16 | 17 | Parameters 18 | ---------- 19 | obj: Object 20 | The object to serialize 21 | 22 | Returns 23 | ------- 24 | dict_obj: A serializable dictionary with enough metadata to reconstruct 25 | `obj` 26 | 27 | Exceptions 28 | ---------- 29 | TypeError: 30 | Raise if `obj` does not contain a `to_json` attribute 31 | """ 32 | # 'safety' guard: raise error if object does not 33 | # support serialization 34 | if not hasattr(obj, "to_json"): 35 | raise TypeError(f"class {type(obj)} does not expose a `to_json` " 36 | "function") 37 | # Encode to json using the object's `to_json` 38 | obj_type = type(obj) 39 | return { 40 | "__class__": obj.__class__.__name__, 41 | "__module__": obj.__module__, 42 | "__data__": obj_type.to_json(obj) 43 | } 44 | 45 | 46 | def _deserialize_custom_object(obj: dict) -> object: 47 | """Deserialize a user-defined object from JSON. 48 | 49 | Deserializes a dictionary encoding a custom object, 50 | if it contains class metadata suggesting that it should be 51 | decoded further. 
52 | 53 | Parameters: 54 | ---------- 55 | obj: dict 56 | Dictionary object that potentially encodes a custom class 57 | 58 | Returns: 59 | -------- 60 | object 61 | Either the original `obj` dictionary or the custom object it encoded 62 | 63 | Exceptions 64 | ---------- 65 | TypeError 66 | If the decoded object does not contain a `from_json` function 67 | """ 68 | if ("__class__" in obj) and ("__module__" in obj) and ("__data__" in obj): 69 | class_name = obj.pop("__class__") 70 | module_name = obj.pop("__module__") 71 | obj_data = obj.pop("__data__") 72 | 73 | # Importing the clas 74 | module = import_module(module_name) 75 | class_ = getattr(module, class_name) 76 | 77 | if not hasattr(class_, "from_json"): 78 | raise TypeError(f"class {type(obj)} does not expose a `from_json` " 79 | "function") 80 | 81 | # Initialize the object using its `from_json` deserializer 82 | obj = class_.from_json(obj_data) 83 | return obj 84 | 85 | 86 | class OrchestrationContext(_abc.OrchestrationContext): 87 | """A durable function orchestration context. 88 | 89 | :param str body: 90 | The body of orchestration context json. 91 | """ 92 | 93 | def __init__(self, 94 | body: Union[str, bytes]) -> None: 95 | if isinstance(body, str): 96 | self.__body = body 97 | if isinstance(body, bytes): 98 | self.__body = body.decode('utf-8') 99 | 100 | @property 101 | def body(self) -> str: 102 | return self.__body 103 | 104 | def __repr__(self): 105 | return ( 106 | f'' 108 | ) 109 | 110 | def __str__(self): 111 | return self.__body 112 | 113 | 114 | class EntityContext(_abc.OrchestrationContext): 115 | """A durable function entity context. 116 | 117 | :param str body: 118 | The body of orchestration context json. 
119 | """ 120 | 121 | def __init__(self, 122 | body: Union[str, bytes]) -> None: 123 | if isinstance(body, str): 124 | self.__body = body 125 | if isinstance(body, bytes): 126 | self.__body = body.decode('utf-8') 127 | 128 | @property 129 | def body(self) -> str: 130 | return self.__body 131 | 132 | def __repr__(self): 133 | return ( 134 | f'' 136 | ) 137 | 138 | def __str__(self): 139 | return self.__body 140 | -------------------------------------------------------------------------------- /azure/functions/eventgrid.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import collections 5 | import datetime 6 | from typing import Optional, List, Any, Dict, Union 7 | 8 | from azure.functions import _eventgrid as azf_eventgrid 9 | from ._jsonutils import json 10 | 11 | from . import meta 12 | from .meta import Datum 13 | 14 | 15 | class EventGridEventInConverter(meta.InConverter, binding='eventGridTrigger', 16 | trigger=True): 17 | 18 | @classmethod 19 | def check_input_type_annotation(cls, pytype: type) -> bool: 20 | """ 21 | Event Grid always sends an array and may send more than one event in 22 | the array. 
class EventGridEventOutConverter(meta.OutConverter, binding="eventGrid"):
    """Converter for the eventGrid output binding.

    Accepts str/bytes payloads, a single EventGridOutputEvent, or an
    iterable of str / EventGridOutputEvent items.
    """

    @classmethod
    def check_output_type_annotation(cls, pytype: type) -> bool:
        # NOTE: parameterized generics (e.g. List[EventGridOutputEvent])
        # must NOT appear in the issubclass() tuple -- when no earlier
        # member matches, issubclass raises TypeError on a parameterized
        # generic instead of returning False. Iterable annotations are
        # already covered by meta.is_iterable_type_annotation below.
        valid_types = (str, bytes, azf_eventgrid.EventGridOutputEvent)
        return (meta.is_iterable_type_annotation(pytype, str)
                or meta.is_iterable_type_annotation(
                    pytype, azf_eventgrid.EventGridOutputEvent)
                or (isinstance(pytype, type)
                    and issubclass(pytype, valid_types)))

    @classmethod
    def encode(cls, obj: Any, *, expected_type:
               Optional[type]) -> Optional[Datum]:
        """Encode an output value into a Datum for the functions host."""
        if isinstance(obj, str):
            return meta.Datum(type='string', value=obj)

        elif isinstance(obj, bytes):
            return meta.Datum(type='bytes', value=obj)

        elif isinstance(obj, azf_eventgrid.EventGridOutputEvent):
            return meta.Datum(
                type='json',
                value=json.dumps({
                    'id': obj.id,
                    'subject': obj.subject,
                    'dataVersion': obj.data_version,
                    'eventType': obj.event_type,
                    'data': obj.get_json(),
                    'eventTime': cls._format_datetime(obj.event_time)
                })
            )

        elif isinstance(obj, collections.abc.Iterable):
            # Mixed list output: strings pass through, events are expanded
            # into their JSON envelope; anything else is rejected.
            msgs: List[Union[str, Dict[str, Any]]] = []
            for item in obj:
                if isinstance(item, str):
                    msgs.append(item)
                elif isinstance(item, azf_eventgrid.EventGridOutputEvent):
                    msgs.append({'id': item.id,
                                 'subject': item.subject,
                                 'dataVersion': item.data_version,
                                 'eventType': item.event_type,
                                 'data': item.get_json(),
                                 'eventTime': cls._format_datetime(
                                     item.event_time)
                                 })
                else:
                    raise NotImplementedError(
                        'invalid data type in output '
                        'queue message list: {}'.format(type(item)))

            return meta.Datum(
                type='json',
                value=json.dumps(msgs)
            )

        raise NotImplementedError

    @classmethod
    def _format_datetime(cls, dt: Optional[datetime.datetime]):
        # Host expects ISO-8601 strings; None passes through for events
        # without a timestamp.
        if dt is None:
            return None
        else:
            return dt.isoformat()
2 | # Licensed under the MIT License. 3 | import inspect 4 | 5 | from typing import List, Optional, Union, get_origin, get_args 6 | from datetime import datetime 7 | 8 | from ..mcp import MCPToolContext 9 | from azure.functions.decorators.constants import ( 10 | MCP_TOOL_TRIGGER 11 | ) 12 | from azure.functions.decorators.core import Trigger, DataType, McpPropertyType 13 | 14 | # Mapping Python types to MCP property types 15 | _TYPE_MAPPING = { 16 | int: "integer", 17 | float: "number", 18 | str: "string", 19 | bool: "boolean", 20 | object: "object", 21 | datetime: "string" 22 | } 23 | 24 | 25 | class MCPToolTrigger(Trigger): 26 | 27 | @staticmethod 28 | def get_binding_name() -> str: 29 | return MCP_TOOL_TRIGGER 30 | 31 | def __init__(self, 32 | name: str, 33 | tool_name: str, 34 | description: Optional[str] = None, 35 | tool_properties: Optional[str] = None, 36 | data_type: Optional[DataType] = None, 37 | **kwargs): 38 | self.tool_name = tool_name 39 | self.description = description 40 | self.tool_properties = tool_properties 41 | super().__init__(name=name, data_type=data_type) 42 | 43 | 44 | def unwrap_optional(pytype: type): 45 | """If Optional[T], return T; else return pytype unchanged.""" 46 | origin = get_origin(pytype) 47 | args = get_args(pytype) 48 | if origin is Union and any(a is type(None) for a in args): # noqa 49 | non_none_args = [a for a in args if a is not type(None)] # noqa 50 | return non_none_args[0] if non_none_args else str 51 | return pytype 52 | 53 | 54 | def check_as_array(param_type_hint: type) -> bool: 55 | """Return True if type is (possibly optional) list[...]""" 56 | unwrapped = unwrap_optional(param_type_hint) 57 | origin = get_origin(unwrapped) 58 | return origin in (list, List) 59 | 60 | 61 | def check_property_type(pytype: type, as_array: bool) -> str: 62 | """Map Python type hints to MCP property types.""" 63 | if isinstance(pytype, McpPropertyType): 64 | return pytype.value 65 | base_type = unwrap_optional(pytype) 66 | if 
as_array: 67 | args = get_args(base_type) 68 | inner_type = unwrap_optional(args[0]) if args else str 69 | return _TYPE_MAPPING.get(inner_type, "string") 70 | return _TYPE_MAPPING.get(base_type, "string") 71 | 72 | 73 | def check_is_required(param: type, param_type_hint: type) -> bool: 74 | """ 75 | Return True when param is required, False when optional. 76 | 77 | Rules: 78 | - If param has an explicit default -> not required 79 | - If annotation is Optional[T] (Union[..., None]) -> not required 80 | - Otherwise -> required 81 | """ 82 | # 1) default value present => not required 83 | if param.default is not inspect.Parameter.empty: 84 | return False 85 | 86 | # 2) Optional[T] => not required 87 | origin = get_origin(param_type_hint) 88 | args = get_args(param_type_hint) 89 | if origin is Union and any(a is type(None) for a in args): # noqa 90 | return False 91 | 92 | # 3) It's required 93 | return True 94 | 95 | 96 | def build_property_metadata(sig, 97 | skip_param_names: List[str], 98 | explicit_properties: dict) -> List[dict]: 99 | tool_properties = [] 100 | for param_name, param in sig.parameters.items(): 101 | if param_name in skip_param_names: 102 | continue 103 | param_type_hint = param.annotation if param.annotation != inspect.Parameter.empty else str # noqa 104 | 105 | if param_type_hint is MCPToolContext: 106 | continue 107 | 108 | # Inferred defaults 109 | is_required = check_is_required(param, param_type_hint) 110 | as_array = check_as_array(param_type_hint) 111 | property_type = check_property_type(param_type_hint, as_array) 112 | 113 | property_data = { 114 | "propertyName": param_name, 115 | "propertyType": property_type, 116 | "description": "", 117 | "isArray": as_array, 118 | "isRequired": is_required 119 | } 120 | 121 | # Merge in any explicit overrides 122 | if param_name in explicit_properties: 123 | overrides = explicit_properties[param_name] 124 | for key, value in overrides.items(): 125 | if value is not None: 126 | property_data[key] = 
value 127 | 128 | tool_properties.append(property_data) 129 | return tool_properties 130 | -------------------------------------------------------------------------------- /azure/functions/blob.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import io 5 | from typing import Any, Optional, Union 6 | 7 | from azure.functions import _abc as azf_abc 8 | from . import meta 9 | 10 | 11 | class InputStream(azf_abc.InputStream): 12 | def __init__(self, *, data: Union[bytes, meta.Datum], 13 | name: Optional[str] = None, 14 | uri: Optional[str] = None, 15 | length: Optional[int] = None, 16 | blob_properties: Optional[dict] = None, 17 | metadata: Optional[dict] = None) -> None: 18 | self._io = io.BytesIO(data) # type: ignore 19 | self._name = name 20 | self._length = length 21 | self._uri = uri 22 | self._blob_properties = blob_properties 23 | self._metadata = metadata 24 | 25 | @property 26 | def name(self) -> Optional[str]: 27 | return self._name 28 | 29 | @property 30 | def length(self) -> Optional[int]: 31 | return self._length 32 | 33 | @property 34 | def uri(self) -> Optional[str]: 35 | return self._uri 36 | 37 | @property 38 | def blob_properties(self): 39 | return self._blob_properties 40 | 41 | @property 42 | def metadata(self): 43 | return self._metadata 44 | 45 | def read(self, size=-1) -> bytes: 46 | return self._io.read(size) 47 | 48 | # implemented read1 method using aliasing. 
49 | read1 = read 50 | 51 | def readable(self) -> bool: 52 | return True 53 | 54 | def seekable(self) -> bool: 55 | return False 56 | 57 | def writable(self) -> bool: 58 | return False 59 | 60 | 61 | class BlobConverter(meta.InConverter, 62 | meta.OutConverter, 63 | binding='blob', 64 | trigger='blobTrigger'): 65 | @classmethod 66 | def check_input_type_annotation(cls, pytype: type) -> bool: 67 | return issubclass(pytype, (azf_abc.InputStream, bytes, str)) 68 | 69 | @classmethod 70 | def check_output_type_annotation(cls, pytype: type) -> bool: 71 | return ( 72 | issubclass(pytype, (str, bytes, bytearray, azf_abc.InputStream)) 73 | or callable(getattr(pytype, 'read', None)) 74 | ) 75 | 76 | @classmethod 77 | def encode(cls, obj: Any, *, 78 | expected_type: Optional[type]) -> meta.Datum: 79 | if callable(getattr(obj, 'read', None)): 80 | # file-like object 81 | obj = obj.read() 82 | 83 | if isinstance(obj, str): 84 | return meta.Datum(type='string', value=obj) 85 | 86 | elif isinstance(obj, (bytes, bytearray)): 87 | return meta.Datum(type='bytes', value=bytes(obj)) 88 | 89 | else: 90 | raise NotImplementedError 91 | 92 | @classmethod 93 | def decode(cls, data: meta.Datum, *, trigger_metadata) -> Any: 94 | if data is None or data.type is None: 95 | return None 96 | 97 | data_type = data.type 98 | 99 | if data_type == 'string': 100 | data = data.value.encode('utf-8') 101 | elif data_type == 'bytes': 102 | data = data.value 103 | else: 104 | raise ValueError( 105 | f'unexpected type of data received for the "blob" binding ' 106 | f': {data_type!r}' 107 | ) 108 | 109 | if not trigger_metadata: 110 | return InputStream(data=data) 111 | else: 112 | properties = cls._decode_trigger_metadata_field( 113 | trigger_metadata, 'Properties', python_type=dict) 114 | if properties: 115 | blob_properties = properties 116 | length = properties.get('ContentLength') or \ 117 | properties.get('Length') 118 | length = int(length) if length else None 119 | else: 120 | blob_properties = 
None 121 | length = None 122 | 123 | metadata = None 124 | try: 125 | metadata = cls._decode_trigger_metadata_field(trigger_metadata, 126 | 'Metadata', 127 | python_type=dict) 128 | except (KeyError, ValueError): 129 | # avoiding any exceptions when fetching Metadata as the 130 | # metadata type is unclear. 131 | pass 132 | 133 | return InputStream( 134 | data=data, 135 | name=cls._decode_trigger_metadata_field( 136 | trigger_metadata, 'BlobTrigger', python_type=str), 137 | length=length, 138 | uri=cls._decode_trigger_metadata_field( 139 | trigger_metadata, 'Uri', python_type=str), 140 | blob_properties=blob_properties, 141 | metadata=metadata 142 | ) 143 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Functions Header Image - Lightning Logo Azure Functions Python Library 2 | 3 | | Branch | CodeCov | 4 | |--------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------| 5 | | master | [![codecov](https://codecov.io/gh/Azure/azure-functions-python-library/branch/master/graph/badge.svg)](https://codecov.io/gh/Azure/azure-functions-python-library) | 6 | | dev | [![codecov](https://codecov.io/gh/Azure/azure-functions-python-library/branch/dev/graph/badge.svg)](https://codecov.io/gh/Azure/azure-functions-python-library) | 7 | 8 | ## Overview 9 | 10 | Python support for Azure Functions is based on Python 3.10, 3.11, 3.12, and 3.13 serverless hosting on Linux, and the Functions 2.x ([EOL](https://learn.microsoft.com/azure/azure-functions/functions-versions?#retired-versions)), 3.x ([EOL](https://learn.microsoft.com/azure/azure-functions/functions-versions?#retired-versions)) and 4.0 runtime. 

Here is the current status of Python in Azure Functions:

_What are the supported Python versions?_

| Azure Functions Runtime | Python 3.10 | Python 3.11 | Python 3.12 | Python 3.13 |
|----------------------------------|------------|------------|-------------|-------------|
| Azure Functions 4.0 | ✔ | ✔ | ✔ | ✔ |

_What's available?_
- Build, test, debug and publish using Azure Functions Core Tools (CLI) or Visual Studio Code
- Triggers / Bindings : Blob, Cosmos DB, Event Grid, Event Hub, HTTP, Kafka, MySQL, Queue, ServiceBus, SQL, Timer, and Warmup
- Create a Python Function on Linux using a custom docker image
- Triggers / Bindings : Custom binding support

#### Get Started

- [Create your first Python function](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-python)
- [Developer guide](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python)
- [Binding API reference](https://docs.microsoft.com/en-us/python/api/azure-functions/azure.functions?view=azure-python)
- [Develop using VS Code](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-vs-code)
- [Create a Python Function on Linux using a custom docker image](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-function-linux-custom-image)

#### Give Feedback

Issues and feature requests are tracked in a variety of places.
To report this feedback, please file an issue to the relevant repository below: 37 | 38 | | Item | Description | Link | 39 | |---------------|----------------------------------------------|--------------------------------------------------------------------------------| 40 | | Python Worker | Programming Model, Triggers & Bindings | [File an Issue](https://github.com/Azure/azure-functions-python-worker/issues) | 41 | | Linux | Base Docker Images | [File an Issue](https://github.com/Azure/azure-functions-docker/issues) | 42 | | Runtime | Script Host & Language Extensibility | [File an Issue](https://github.com/Azure/azure-functions-host/issues) | 43 | | VSCode | VSCode Extension for Azure Functions | [File an Issue](https://github.com/microsoft/vscode-azurefunctions/issues) | 44 | | Core Tools | Command Line Interface for Local Development | [File an Issue](https://github.com/Azure/azure-functions-core-tools/issues) | 45 | | Portal | User Interface or Experience Issue | [File an Issue](https://github.com/azure/azure-functions-ux/issues) | 46 | | Templates | Code Issues with Creation Template | [File an Issue](https://github.com/Azure/azure-functions-templates/issues) | 47 | 48 | ## Contributing 49 | 50 | This project welcomes contributions and suggestions. Most contributions require you to agree to a 51 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us 52 | the rights to use your contribution. For details, visit https://cla.microsoft.com. 53 | 54 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide 55 | a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions 56 | provided by the bot. You will only need to do this once across all repos using our CLA. 57 | 58 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 
59 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or 60 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 61 | -------------------------------------------------------------------------------- /tests/decorators/test_blob.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | import unittest 4 | 5 | from azure.functions.decorators.blob import BlobTrigger, BlobOutput, BlobInput 6 | from azure.functions.decorators.core import BindingDirection, BlobSource, \ 7 | DataType 8 | 9 | 10 | class TestBlob(unittest.TestCase): 11 | def test_blob_trigger_creation_with_no_source(self): 12 | trigger = BlobTrigger(name="req", 13 | path="dummy_path", 14 | connection="dummy_connection", 15 | data_type=DataType.UNDEFINED, 16 | dummy_field="dummy") 17 | 18 | self.assertEqual(trigger.get_binding_name(), "blobTrigger") 19 | self.assertEqual(trigger.get_dict_repr(), { 20 | "type": "blobTrigger", 21 | "direction": BindingDirection.IN, 22 | 'dummyField': 'dummy', 23 | "name": "req", 24 | "dataType": DataType.UNDEFINED, 25 | "path": "dummy_path", 26 | "connection": "dummy_connection" 27 | }) 28 | 29 | def test_blob_trigger_creation_with_default_specified_source(self): 30 | trigger = BlobTrigger(name="req", 31 | path="dummy_path", 32 | connection="dummy_connection", 33 | source=BlobSource.LOGS_AND_CONTAINER_SCAN, 34 | data_type=DataType.UNDEFINED, 35 | dummy_field="dummy") 36 | 37 | self.assertEqual(trigger.get_binding_name(), "blobTrigger") 38 | self.assertEqual(trigger.get_dict_repr(), { 39 | "type": "blobTrigger", 40 | "direction": BindingDirection.IN, 41 | 'dummyField': 'dummy', 42 | "name": "req", 43 | "dataType": DataType.UNDEFINED, 44 | "path": "dummy_path", 45 | 'source': 'LogsAndContainerScan', 46 | "connection": "dummy_connection" 47 
| }) 48 | 49 | def test_blob_trigger_creation_with_source_as_string(self): 50 | trigger = BlobTrigger(name="req", 51 | path="dummy_path", 52 | connection="dummy_connection", 53 | source="EventGrid", 54 | data_type=DataType.UNDEFINED, 55 | dummy_field="dummy") 56 | 57 | self.assertEqual(trigger.get_binding_name(), "blobTrigger") 58 | self.assertEqual(trigger.get_dict_repr(), { 59 | "type": "blobTrigger", 60 | "direction": BindingDirection.IN, 61 | 'dummyField': 'dummy', 62 | "name": "req", 63 | "dataType": DataType.UNDEFINED, 64 | "path": "dummy_path", 65 | 'source': 'EventGrid', 66 | "connection": "dummy_connection" 67 | }) 68 | 69 | def test_blob_trigger_creation_with_source_as_enum(self): 70 | trigger = BlobTrigger(name="req", 71 | path="dummy_path", 72 | connection="dummy_connection", 73 | source=BlobSource.EVENT_GRID, 74 | data_type=DataType.UNDEFINED, 75 | dummy_field="dummy") 76 | 77 | self.assertEqual(trigger.get_binding_name(), "blobTrigger") 78 | self.assertEqual(trigger.get_dict_repr(), { 79 | "type": "blobTrigger", 80 | "direction": BindingDirection.IN, 81 | 'dummyField': 'dummy', 82 | "name": "req", 83 | "dataType": DataType.UNDEFINED, 84 | "path": "dummy_path", 85 | 'source': 'EventGrid', 86 | "connection": "dummy_connection" 87 | }) 88 | 89 | def test_blob_input_valid_creation(self): 90 | blob_input = BlobInput(name="res", 91 | path="dummy_path", 92 | connection="dummy_connection", 93 | data_type=DataType.UNDEFINED, 94 | dummy_field="dummy") 95 | 96 | self.assertEqual(blob_input.get_binding_name(), "blob") 97 | self.assertEqual(blob_input.get_dict_repr(), { 98 | "type": "blob", 99 | "direction": BindingDirection.IN, 100 | 'dummyField': 'dummy', 101 | "name": "res", 102 | "dataType": DataType.UNDEFINED, 103 | "path": "dummy_path", 104 | "connection": "dummy_connection" 105 | }) 106 | 107 | def test_blob_output_valid_creation(self): 108 | blob_output = BlobOutput(name="res", 109 | path="dummy_path", 110 | connection="dummy_connection", 111 | 
data_type=DataType.UNDEFINED, 112 | dummy_field="dummy") 113 | 114 | self.assertEqual(blob_output.get_binding_name(), "blob") 115 | self.assertEqual(blob_output.get_dict_repr(), { 116 | "type": "blob", 117 | "direction": BindingDirection.OUT, 118 | 'dummyField': 'dummy', 119 | "name": "res", 120 | "dataType": DataType.UNDEFINED, 121 | "path": "dummy_path", 122 | "connection": "dummy_connection" 123 | }) 124 | -------------------------------------------------------------------------------- /azure/functions/queue.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import collections.abc 5 | import datetime 6 | from typing import List, Dict, Any, Union, Optional 7 | 8 | from azure.functions import _abc as azf_abc 9 | from azure.functions import _queue as azf_queue 10 | 11 | from . import meta 12 | from ._jsonutils import json 13 | 14 | 15 | class QueueMessage(azf_queue.QueueMessage): 16 | """An HTTP response object.""" 17 | 18 | def __init__(self, *, 19 | id=None, body=None, 20 | dequeue_count=None, 21 | expiration_time=None, 22 | insertion_time=None, 23 | time_next_visible=None, 24 | pop_receipt=None): 25 | super().__init__(id=id, body=body, pop_receipt=pop_receipt) 26 | self.__dequeue_count = dequeue_count 27 | self.__expiration_time = expiration_time 28 | self.__insertion_time = insertion_time 29 | self.__time_next_visible = time_next_visible 30 | 31 | @property 32 | def dequeue_count(self): 33 | return self.__dequeue_count 34 | 35 | @property 36 | def expiration_time(self): 37 | return self.__expiration_time 38 | 39 | @property 40 | def insertion_time(self): 41 | return self.__insertion_time 42 | 43 | @property 44 | def time_next_visible(self): 45 | return self.__time_next_visible 46 | 47 | def __repr__(self) -> str: 48 | return ( 49 | f'' 54 | ) 55 | 56 | 57 | class QueueMessageInConverter(meta.InConverter, 58 | 
binding='queueTrigger', trigger=True): 59 | 60 | @classmethod 61 | def check_input_type_annotation(cls, pytype: type) -> bool: 62 | return issubclass(pytype, azf_abc.QueueMessage) 63 | 64 | @classmethod 65 | def decode(cls, data: meta.Datum, *, 66 | trigger_metadata) -> Any: 67 | data_type = data.type 68 | 69 | if data_type in ['string', 'bytes']: 70 | body = data.value 71 | 72 | else: 73 | raise NotImplementedError( 74 | f'unsupported queue payload type: {data_type}') 75 | 76 | if trigger_metadata is None: 77 | raise NotImplementedError( 78 | 'missing trigger metadata for queue input') 79 | 80 | return QueueMessage( 81 | id=cls._decode_trigger_metadata_field( 82 | trigger_metadata, 'Id', python_type=str), 83 | body=body, 84 | dequeue_count=cls._decode_trigger_metadata_field( 85 | trigger_metadata, 'DequeueCount', python_type=int), 86 | expiration_time=cls._parse_datetime_metadata( 87 | trigger_metadata, 'ExpirationTime'), 88 | insertion_time=cls._parse_datetime_metadata( 89 | trigger_metadata, 'InsertionTime'), 90 | time_next_visible=cls._parse_datetime_metadata( 91 | trigger_metadata, 'NextVisibleTime'), 92 | pop_receipt=cls._decode_trigger_metadata_field( 93 | trigger_metadata, 'PopReceipt', python_type=str) 94 | ) 95 | 96 | 97 | class QueueMessageOutConverter(meta.OutConverter, binding='queue'): 98 | 99 | @classmethod 100 | def check_output_type_annotation(cls, pytype: type) -> bool: 101 | valid_types = (azf_abc.QueueMessage, str, bytes) 102 | return ( 103 | meta.is_iterable_type_annotation(pytype, valid_types) 104 | or (isinstance(pytype, type) and issubclass(pytype, valid_types)) 105 | ) 106 | 107 | @classmethod 108 | def encode(cls, obj: Any, *, 109 | expected_type: Optional[type]) -> meta.Datum: 110 | if isinstance(obj, str): 111 | return meta.Datum(type='string', value=obj) 112 | 113 | elif isinstance(obj, bytes): 114 | return meta.Datum(type='bytes', value=obj) 115 | 116 | elif isinstance(obj, azf_queue.QueueMessage): 117 | return meta.Datum( 118 | 
type='json', 119 | value=json.dumps({ 120 | 'id': obj.id, 121 | 'body': obj.get_body().decode('utf-8'), 122 | }) 123 | ) 124 | 125 | elif isinstance(obj, collections.abc.Iterable): 126 | msgs: List[Union[str, Dict]] = [] 127 | for item in obj: 128 | if isinstance(item, str): 129 | msgs.append(item) 130 | elif isinstance(item, azf_queue.QueueMessage): 131 | msgs.append({ 132 | 'id': item.id, 133 | 'body': item.get_body().decode('utf-8') 134 | }) 135 | else: 136 | raise NotImplementedError( 137 | 'invalid data type in output ' 138 | 'queue message list: {}'.format(type(item))) 139 | 140 | return meta.Datum( 141 | type='json', 142 | value=json.dumps(msgs) 143 | ) 144 | 145 | raise NotImplementedError 146 | 147 | @classmethod 148 | def _format_datetime(cls, dt: Optional[datetime.datetime]): 149 | if dt is None: 150 | return None 151 | else: 152 | return dt.isoformat() 153 | -------------------------------------------------------------------------------- /tests/decorators/test_core.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
3 | 4 | import unittest 5 | 6 | from azure.functions.decorators.core import BindingDirection, DataType, \ 7 | InputBinding, OutputBinding, Trigger, Setting 8 | 9 | 10 | class DummyTrigger(Trigger): 11 | @staticmethod 12 | def get_binding_name() -> str: 13 | return "Dummy" 14 | 15 | def __init__(self, 16 | name: str, 17 | data_type: DataType = DataType.UNDEFINED, 18 | **kwargs): 19 | super().__init__(name=name, data_type=data_type) 20 | 21 | 22 | class DummyInputBinding(InputBinding): 23 | @staticmethod 24 | def get_binding_name() -> str: 25 | return "DummyInputBinding" 26 | 27 | def __init__(self, 28 | name: str, 29 | data_type: DataType = DataType.UNDEFINED, 30 | **kwargs): 31 | super().__init__(name=name, data_type=data_type) 32 | 33 | 34 | class DummySetting(Setting): 35 | 36 | def __init__(self, setting_name: str) -> None: 37 | super().__init__(setting_name=setting_name) 38 | 39 | 40 | class DummyOutputBinding(OutputBinding): 41 | @staticmethod 42 | def get_binding_name() -> str: 43 | return "DummyOutputBinding" 44 | 45 | def __init__(self, 46 | name: str, 47 | data_type: DataType = DataType.UNDEFINED, 48 | **kwargs): 49 | super().__init__(name=name, data_type=data_type) 50 | 51 | 52 | class TestBindings(unittest.TestCase): 53 | def test_trigger_creation(self): 54 | test_trigger = DummyTrigger(name="dummy", data_type=DataType.UNDEFINED) 55 | 56 | expected_dict = {'dataType': DataType.UNDEFINED, 57 | 'direction': BindingDirection.IN, 58 | 'name': 'dummy', 59 | 'type': 'Dummy'} 60 | self.assertEqual(test_trigger.get_binding_name(), "Dummy") 61 | self.assertEqual(test_trigger.get_dict_repr(), expected_dict) 62 | 63 | def test_param_direction_unset(self): 64 | test_trigger = DummyTrigger(name="dummy", data_type=DataType.UNDEFINED, 65 | direction="dummy", type="hello") 66 | 67 | expected_dict = {'dataType': DataType.UNDEFINED, 68 | 'direction': BindingDirection.IN, 69 | 'name': 'dummy', 70 | 'type': 'Dummy'} 71 | self.assertEqual(test_trigger.get_binding_name(), 
"Dummy") 72 | self.assertEqual(test_trigger.get_dict_repr(), expected_dict) 73 | 74 | def test_input_creation(self): 75 | test_input = DummyInputBinding(name="dummy", 76 | data_type=DataType.UNDEFINED) 77 | 78 | expected_dict = {'dataType': DataType.UNDEFINED, 79 | 'direction': BindingDirection.IN, 80 | 'name': 'dummy', 81 | 'type': 'DummyInputBinding'} 82 | 83 | self.assertEqual(test_input.get_binding_name(), "DummyInputBinding") 84 | self.assertEqual(test_input.get_dict_repr(), expected_dict) 85 | 86 | def test_output_creation(self): 87 | test_output = DummyOutputBinding(name="dummy", 88 | data_type=DataType.UNDEFINED) 89 | 90 | expected_dict = {'dataType': DataType.UNDEFINED, 91 | 'direction': BindingDirection.OUT, 92 | 'name': 'dummy', 93 | 'type': 'DummyOutputBinding'} 94 | 95 | self.assertEqual(test_output.get_binding_name(), "DummyOutputBinding") 96 | self.assertEqual(test_output.get_dict_repr(), expected_dict) 97 | 98 | def test_supported_trigger_types_populated(self): 99 | for supported_trigger in Trigger.__subclasses__(): 100 | trigger_name = supported_trigger.__name__ 101 | if trigger_name != "GenericTrigger": 102 | trigger_type_name = supported_trigger.get_binding_name() 103 | self.assertTrue(trigger_type_name is not None, 104 | f"binding_type {trigger_name} can not be " 105 | f"None!") 106 | self.assertTrue(len(trigger_type_name) > 0, 107 | f"binding_type {trigger_name} can not be " 108 | f"empty str!") 109 | 110 | 111 | class TestSettings(unittest.TestCase): 112 | 113 | def test_setting_creation(self): 114 | """ 115 | Tests that the setting_name is set correctly 116 | """ 117 | # DummySetting is a test setting that inherits from Setting 118 | test_setting = DummySetting(setting_name="TestSetting") 119 | self.assertEqual(test_setting.get_setting_name(), "TestSetting") 120 | 121 | def test_get_dict_repr(self): 122 | """ 123 | Tests that the get_dict_repr method returns the correct dict 124 | when a new setting is intialized 125 | """ 126 | 127 | class 
NewSetting(DummySetting): 128 | 129 | def __init__(self, name: str): 130 | self.name = name 131 | super().__init__(setting_name="TestSetting") 132 | 133 | test_setting = NewSetting(name="NewSetting") 134 | 135 | expected_dict = {'setting_name': "TestSetting", "name": "NewSetting"} 136 | 137 | self.assertEqual(test_setting.get_dict_repr(), expected_dict) 138 | self.assertEqual(test_setting.get_settings_value("name"), "NewSetting") 139 | -------------------------------------------------------------------------------- /azure/functions/extension/app_extension_base.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import typing 5 | from logging import Logger 6 | from .extension_meta import ExtensionMeta 7 | from .extension_scope import ExtensionScope 8 | from .._abc import Context 9 | 10 | 11 | class AppExtensionBase(metaclass=ExtensionMeta): 12 | """An abstract class defines the global life-cycle hooks to be implemented 13 | by customer's extension, will be applied to all functions. 14 | 15 | An AppExtension should be treated as a static class. Must not contain 16 | __init__ method since it is not instantiable. 17 | 18 | Please place your initialization code in init() classmethod, consider 19 | accepting extension settings in configure() classmethod from customers. 20 | """ 21 | 22 | _scope = ExtensionScope.APPLICATION 23 | 24 | @classmethod 25 | def init(cls): 26 | """The function will be executed when the extension is loaded. 27 | Happens when Azure Functions customers import the extension module. 28 | """ 29 | pass 30 | 31 | @classmethod 32 | def configure(cls, *args, **kwargs): 33 | """This function is intended to be called by Azure Functions 34 | customers. This is a contract between extension developers and 35 | azure functions customers. 
If multiple .configure() are called, 36 | the extension system cannot guarantee the calling order. 37 | """ 38 | pass 39 | 40 | # DO NOT decorate this with @abc.abstractstatismethod 41 | # since implementation by subclass is not mandatory 42 | @classmethod 43 | def post_function_load_app_level(cls, 44 | function_name: str, 45 | function_directory: str, 46 | *args, **kwargs) -> None: 47 | """This must be implemented as a @classmethod. It will be called right 48 | after a customer's function is loaded. In this stage, the customer's 49 | logger is not fully initialized from the Python worker. Please use 50 | print() to emit a message if necessary. 51 | 52 | Parameters 53 | ---------- 54 | function_name: str 55 | The name of customer's function (e.g. HttpTrigger) 56 | function_directory: str 57 | The path to customer's function directory 58 | (e.g. /home/site/wwwroot/HttpTrigger) 59 | """ 60 | pass 61 | 62 | # DO NOT decorate this with @abc.abstractstatismethod 63 | # since implementation by subclass is not mandatory 64 | @classmethod 65 | def pre_invocation_app_level( 66 | cls, 67 | logger: Logger, 68 | context: Context, 69 | func_args: typing.Optional[typing.Dict[str, object]] = None, 70 | *args, 71 | **kwargs) -> None: 72 | """This must be implemented as a @staticmethod. It will be called right 73 | before a customer's function is being executed. 74 | 75 | Parameters 76 | ---------- 77 | logger: logging.Logger 78 | A logger provided by Python worker. Extension developer should 79 | use this logger to emit telemetry to Azure Functions customers. 80 | context: azure.functions.Context 81 | This will include the function_name, function_directory and an 82 | invocation_id of this specific invocation. 83 | func_args: typing.Dict[str, object] 84 | Arguments that are passed into the Azure Functions. The name of 85 | each parameter is defined in function.json. 
Extension developers 86 | may also want to do isinstance() check if you want to apply 87 | operations to specific trigger types or input binding types. 88 | """ 89 | pass 90 | 91 | # DO NOT decorate this with @abc.abstractstatismethod 92 | # since implementation by subclass is not mandatory 93 | @classmethod 94 | def post_invocation_app_level( 95 | cls, 96 | logger: Logger, 97 | context: Context, 98 | func_args: typing.Optional[typing.Dict[str, object]] = None, 99 | func_ret: typing.Optional[object] = None, 100 | *args, 101 | **kwargs) -> None: 102 | """This must be implemented as a @staticmethod. It will be called right 103 | after a customer's function is being executed. 104 | 105 | Parameters 106 | ---------- 107 | logger: logging.Logger 108 | A logger provided by Python worker. Extension developer should 109 | use this logger to emit telemetry to Azure Functions customers. 110 | context: azure.functions.Context 111 | This will include the function_name, function_directory and an 112 | invocation_id of this specific invocation. 113 | func_args: typing.Dict[str, object] 114 | Arguments that are passed into the Azure Functions. The name of 115 | each parameter is defined in function.json. Extension developers 116 | may also want to do isinstance() check if you want to apply 117 | operations to specific trigger types or input binding types. 118 | func_ret: typing.Optional[object] 119 | Return value from Azure Functions. This is usually the value 120 | defined in function.json $return section. Extension developers 121 | may also want to do isinstance() check if you want to apply 122 | operations to specific types or input binding types." 123 | """ 124 | pass 125 | -------------------------------------------------------------------------------- /azure/functions/decorators/dapr.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
from typing import Optional

from azure.functions.decorators.constants import DAPR_BINDING, DAPR_INVOKE, \
    DAPR_PUBLISH, DAPR_SECRET, DAPR_SERVICE_INVOCATION_TRIGGER, \
    DAPR_BINDING_TRIGGER, DAPR_STATE, DAPR_TOPIC_TRIGGER
from azure.functions.decorators.core import InputBinding, Trigger, DataType, \
    OutputBinding


class DaprServiceInvocationTrigger(Trigger):
    """Trigger declaration for the DAPR_SERVICE_INVOCATION_TRIGGER
    binding type; carries the invoked method name."""

    @staticmethod
    def get_binding_name() -> str:
        return DAPR_SERVICE_INVOCATION_TRIGGER

    def __init__(self,
                 name: str,
                 method_name: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.method_name = method_name
        super().__init__(name=name, data_type=data_type)


class DaprBindingTrigger(Trigger):
    """Trigger declaration for the DAPR_BINDING_TRIGGER binding type;
    carries the Dapr binding name."""

    @staticmethod
    def get_binding_name() -> str:
        return DAPR_BINDING_TRIGGER

    def __init__(self,
                 name: str,
                 binding_name: str,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.binding_name = binding_name
        super().__init__(name=name, data_type=data_type)


class DaprTopicTrigger(Trigger):
    """Trigger declaration for the DAPR_TOPIC_TRIGGER binding type;
    carries pub/sub component name, topic and optional route."""

    @staticmethod
    def get_binding_name() -> str:
        return DAPR_TOPIC_TRIGGER

    def __init__(self,
                 name: str,
                 pub_sub_name: str,
                 topic: str,
                 route: Optional[str] = None,
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.pub_sub_name = pub_sub_name
        self.topic = topic
        self.route = route
        super().__init__(name=name, data_type=data_type)


class DaprStateInput(InputBinding):
    """Input binding declaration for the DAPR_STATE binding type;
    carries state store, key and Dapr address.

    NOTE(review): dapr_address has no default here although it is
    Optional — callers must pass it explicitly; presumably intentional,
    confirm against the decorator API.
    """

    @staticmethod
    def get_binding_name() -> str:
        return DAPR_STATE

    def __init__(self,
                 name: str,
                 state_store: str,
                 key: str,
                 dapr_address: Optional[str],
                 data_type: Optional[DataType] = None,
                 **kwargs):
        self.state_store = state_store
        self.key = key
        self.dapr_address = dapr_address
        super().__init__(name=name, data_type=data_type)

| 78 | 79 | class DaprSecretInput(InputBinding): 80 | @staticmethod 81 | def get_binding_name() -> str: 82 | return DAPR_SECRET 83 | 84 | def __init__(self, 85 | name: str, 86 | secret_store_name: str, 87 | key: str, 88 | metadata: str, 89 | dapr_address: Optional[str], 90 | data_type: Optional[DataType] = None, 91 | **kwargs): 92 | self.secret_store_name = secret_store_name 93 | self.key = key 94 | self.metadata = metadata 95 | self.dapr_address = dapr_address 96 | super().__init__(name=name, data_type=data_type) 97 | 98 | 99 | class DaprStateOutput(OutputBinding): 100 | @staticmethod 101 | def get_binding_name() -> str: 102 | return DAPR_STATE 103 | 104 | def __init__(self, 105 | name: str, 106 | state_store: str, 107 | key: str, 108 | dapr_address: Optional[str], 109 | data_type: Optional[DataType] = None, 110 | **kwargs): 111 | self.state_store = state_store 112 | self.key = key 113 | self.dapr_address = dapr_address 114 | super().__init__(name=name, data_type=data_type) 115 | 116 | 117 | class DaprInvokeOutput(OutputBinding): 118 | @staticmethod 119 | def get_binding_name() -> str: 120 | return DAPR_INVOKE 121 | 122 | def __init__(self, 123 | name: str, 124 | app_id: str, 125 | method_name: str, 126 | http_verb: str, 127 | dapr_address: Optional[str], 128 | data_type: Optional[DataType] = None, 129 | **kwargs): 130 | self.app_id = app_id 131 | self.method_name = method_name 132 | self.http_verb = http_verb 133 | self.dapr_address = dapr_address 134 | super().__init__(name=name, data_type=data_type) 135 | 136 | 137 | class DaprPublishOutput(OutputBinding): 138 | @staticmethod 139 | def get_binding_name() -> str: 140 | return DAPR_PUBLISH 141 | 142 | def __init__(self, 143 | name: str, 144 | pub_sub_name: str, 145 | topic: str, 146 | dapr_address: Optional[str], 147 | data_type: Optional[DataType] = None, 148 | **kwargs): 149 | self.pub_sub_name = pub_sub_name 150 | self.topic = topic 151 | self.dapr_address = dapr_address 152 | super().__init__(name=name, 
data_type=data_type) 153 | 154 | 155 | class DaprBindingOutput(OutputBinding): 156 | @staticmethod 157 | def get_binding_name() -> str: 158 | return DAPR_BINDING 159 | 160 | def __init__(self, 161 | name: str, 162 | binding_name: str, 163 | operation: str, 164 | dapr_address: Optional[str], 165 | data_type: Optional[DataType] = None, 166 | **kwargs): 167 | self.binding_name = binding_name 168 | self.operation = operation 169 | self.dapr_address = dapr_address 170 | super().__init__(name=name, data_type=data_type) 171 | -------------------------------------------------------------------------------- /tests/decorators/test_kafka.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | import unittest 4 | 5 | from azure.functions.decorators.constants import KAFKA_TRIGGER, KAFKA 6 | from azure.functions.decorators.core import BindingDirection, Cardinality, \ 7 | DataType 8 | from azure.functions.decorators.kafka import KafkaTrigger, KafkaOutput, \ 9 | BrokerAuthenticationMode, BrokerProtocol 10 | 11 | 12 | class TestKafka(unittest.TestCase): 13 | def test_kafka_trigger_valid_creation(self): 14 | trigger = KafkaTrigger(name="arg_name", 15 | topic="topic", 16 | broker_list="broker_list", 17 | event_hub_connection_string="ehcs", 18 | consumer_group="consumer_group", 19 | avro_schema="avro_schema", 20 | username="username", 21 | password="password", 22 | ssl_key_location="ssl_key_location", 23 | ssl_ca_location="ssl_ca_location", 24 | ssl_certificate_location="scl", 25 | ssl_key_password="ssl_key_password", 26 | schema_registry_url="srurl", 27 | schema_registry_username="", 28 | schema_registry_password="srp", 29 | authentication_mode=BrokerAuthenticationMode.PLAIN, # noqa: E501 30 | data_type=DataType.UNDEFINED, 31 | dummy_field="dummy") 32 | 33 | self.assertEqual(trigger.get_binding_name(), "kafkaTrigger") 34 | 
self.assertEqual(trigger.get_dict_repr(), 35 | {"authenticationMode": BrokerAuthenticationMode.PLAIN, 36 | "avroSchema": "avro_schema", 37 | "brokerList": "broker_list", 38 | "consumerGroup": "consumer_group", 39 | "dataType": DataType.UNDEFINED, 40 | "direction": BindingDirection.IN, 41 | "dummyField": "dummy", 42 | "eventHubConnectionString": "ehcs", 43 | "lagThreshold": 1000, 44 | "name": "arg_name", 45 | "password": "password", 46 | "protocol": BrokerProtocol.NOTSET, 47 | "schemaRegistryPassword": "srp", 48 | "schemaRegistryUrl": "srurl", 49 | "schemaRegistryUsername": "", 50 | "sslCaLocation": "ssl_ca_location", 51 | "sslCertificateLocation": "scl", 52 | "sslKeyLocation": "ssl_key_location", 53 | "sslKeyPassword": "ssl_key_password", 54 | "topic": "topic", 55 | "cardinality": Cardinality.ONE, 56 | "type": KAFKA_TRIGGER, 57 | "username": "username"}) 58 | 59 | def test_kafka_output_valid_creation(self): 60 | output = KafkaOutput(name="arg_name", 61 | topic="topic", 62 | broker_list="broker_list", 63 | avro_schema="avro_schema", 64 | username="username", 65 | password="password", 66 | ssl_key_location="ssl_key_location", 67 | ssl_ca_location="ssl_ca_location", 68 | ssl_certificate_location="scl", 69 | ssl_key_password="ssl_key_password", 70 | schema_registry_url="schema_registry_url", 71 | schema_registry_username="", 72 | schema_registry_password="srp", 73 | max_retries=10, 74 | data_type=DataType.UNDEFINED, 75 | dummy_field="dummy") 76 | 77 | self.assertEqual(output.get_binding_name(), "kafka") 78 | self.assertEqual(output.get_dict_repr(), 79 | {'authenticationMode': BrokerAuthenticationMode.NOTSET, # noqa: E501 80 | 'avroSchema': 'avro_schema', 81 | 'batchSize': 10000, 82 | 'brokerList': 'broker_list', 83 | 'dataType': DataType.UNDEFINED, 84 | 'direction': BindingDirection.OUT, 85 | 'dummyField': 'dummy', 86 | 'enableIdempotence': False, 87 | 'lingerMs': 5, 88 | 'maxMessageBytes': 1000000, 89 | 'maxRetries': 10, 90 | 'messageTimeoutMs': 300000, 91 | 'name': 
'arg_name', 92 | 'password': 'password', 93 | 'protocol': BrokerProtocol.NOTSET, 94 | 'requestTimeoutMs': 5000, 95 | 'schemaRegistryPassword': 'srp', 96 | 'schemaRegistryUrl': 'schema_registry_url', 97 | 'schemaRegistryUsername': '', 98 | 'sslCaLocation': 'ssl_ca_location', 99 | 'sslCertificateLocation': 'scl', 100 | 'sslKeyLocation': 'ssl_key_location', 101 | 'sslKeyPassword': 'ssl_key_password', 102 | 'topic': 'topic', 103 | 'type': KAFKA, 104 | 'username': 'username'}) 105 | -------------------------------------------------------------------------------- /azure/functions/http.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import logging 5 | import sys 6 | import typing 7 | from http.cookies import SimpleCookie 8 | 9 | from azure.functions import _abc as azf_abc 10 | from azure.functions import _http as azf_http 11 | from . import meta 12 | from ._jsonutils import json 13 | from werkzeug.datastructures import Headers 14 | 15 | 16 | class HttpRequest(azf_http.HttpRequest): 17 | """An HTTP request object.""" 18 | 19 | __body_bytes: typing.Optional[bytes] 20 | __body_str: typing.Optional[str] 21 | 22 | def __init__(self, 23 | method: str, 24 | url: str, *, 25 | headers: typing.Mapping[str, str], 26 | params: typing.Mapping[str, str], 27 | route_params: typing.Mapping[str, str], 28 | body_type: str, 29 | body: typing.Union[str, bytes]) -> None: 30 | 31 | body_str: typing.Optional[str] = None 32 | body_bytes: typing.Optional[bytes] = None 33 | if isinstance(body, str): 34 | body_str = body 35 | body_bytes = body_str.encode('utf-8') 36 | elif isinstance(body, bytes): 37 | body_bytes = body 38 | else: 39 | raise TypeError( 40 | f'unexpected HTTP request body type: {type(body).__name__}') 41 | 42 | super().__init__(method=method, url=url, headers=headers, 43 | params=params, route_params=route_params, 44 | body=body_bytes) 45 
class HttpResponseConverter(meta.OutConverter, binding='http'):
    """Encodes str or HttpResponse return values into an 'http' datum."""

    @classmethod
    def check_output_type_annotation(cls, pytype: type) -> bool:
        return issubclass(pytype, (azf_abc.HttpResponse, str))

    @classmethod
    def encode(cls, obj: typing.Any, *,
               expected_type: typing.Optional[type]) -> meta.Datum:
        if isinstance(obj, str):
            return meta.Datum(type='string', value=obj)

        if not isinstance(obj, azf_abc.HttpResponse):
            raise NotImplementedError

        status = obj.status_code
        headers: Headers = obj.headers

        # Provide a default content-type when the response did not set one;
        # text mimetypes additionally advertise the charset.
        if 'content-type' not in headers:
            mimetype = obj.mimetype
            if mimetype.startswith('text/'):
                headers['content-type'] = f'{mimetype}; charset={obj.charset}'
            else:
                headers['content-type'] = f'{mimetype}'

        raw_body = obj.get_body()
        datum_body = meta.Datum(
            type='bytes',
            value=raw_body if raw_body is not None else b'')

        cookies = None
        if sys.version_info.major == 3 and sys.version_info.minor <= 7:
            # SimpleCookie api in http.cookies - Python Standard Library
            # does not support the 'samesite' cookie attribute on python
            # 3.7 or below and would cause cookie parsing errors.
            # https://docs.python.org/3/library/http.cookies.html
            # ?msclkid=d78849ddcd7311ecadd81f2f51d08b8e
            logging.warning(
                "Setting multiple 'Set-Cookie' response headers is not "
                "supported in Azure Python Function with python version "
                "3.7, please upgrade to python 3.8 or above.")
        elif "Set-Cookie" in headers:
            cookies = [SimpleCookie(raw) for raw in
                       headers.get_all('Set-Cookie')]
            headers.pop("Set-Cookie")

        return meta.Datum(
            type='http',
            value=dict(
                status_code=meta.Datum(type='string', value=str(status)),
                headers={
                    header_name: meta.Datum(type='string', value=header_value)
                    for header_name, header_value in headers.items()
                },
                cookies=cookies,
                body=datum_body,
            )
        )
cause cookie parsing error 106 | # https://docs.python.org/3/library/http.cookies.html 107 | # ?msclkid=d78849ddcd7311ecadd81f2f51d08b8e 108 | logging.warning( 109 | "Setting multiple 'Set-Cookie' response headers is not " 110 | "supported in Azure Python Function with python version " 111 | "3.7, please upgrade to python 3.8 or above.") 112 | else: 113 | if "Set-Cookie" in headers: 114 | cookies = [SimpleCookie(cookie) for cookie in 115 | headers.get_all('Set-Cookie')] 116 | headers.pop("Set-Cookie") 117 | 118 | return meta.Datum( 119 | type='http', 120 | value=dict( 121 | status_code=meta.Datum(type='string', value=str(status)), 122 | headers={ 123 | n: meta.Datum(type='string', value=h) 124 | for n, h in headers.items() 125 | }, 126 | cookies=cookies, 127 | body=datum_body, 128 | ) 129 | ) 130 | 131 | raise NotImplementedError 132 | 133 | 134 | class HttpRequestConverter(meta.InConverter, 135 | binding='httpTrigger', trigger=True): 136 | 137 | @classmethod 138 | def check_input_type_annotation(cls, pytype: type) -> bool: 139 | return issubclass(pytype, azf_abc.HttpRequest) 140 | 141 | @classmethod 142 | def decode(cls, data: meta.Datum, *, 143 | trigger_metadata) -> typing.Any: 144 | if data.type != 'http': 145 | raise NotImplementedError 146 | 147 | val = data.value 148 | 149 | return HttpRequest( 150 | method=val['method'].value, 151 | url=val['url'].value, 152 | headers={n: v.value for n, v in val['headers'].items()}, 153 | params={n: v.value for n, v in val['query'].items()}, 154 | route_params={n: v.value for n, v in val['params'].items()}, 155 | body_type=val['body'].type, 156 | body=val['body'].value, 157 | ) 158 | -------------------------------------------------------------------------------- /azure/functions/decorators/utils.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 
import inspect
import re
from abc import ABCMeta
from enum import Enum
from json import JSONEncoder
from typing import Any, TypeVar, Optional, Union, Iterable, Type, Callable

T = TypeVar("T", bound=Enum)
# Lowercase/uppercase letters and digits with at least one underscore,
# not starting with a digit.
SNAKE_CASE_RE = re.compile(r'^([a-zA-Z]+\d*_|_+[a-zA-Z\d])\w*$')
# A single word: letters followed by optional digits.
WORD_RE = re.compile(r'^([a-zA-Z]+\d*)$')


class StringifyEnum(Enum):
    """Enum whose string form is the member name rather than the
    default ``ClassName.MEMBER`` representation."""

    def __str__(self):
        return str(self.name)


class BuildDictMeta(type):
    """Metaclass applied to every binding class.

    It wraps ``__init__`` with :meth:`add_to_dict` so the constructor
    records which params (including pass-through kwargs) belong in the
    json dictionary that mirrors function.json in a legacy app, and
    wraps ``get_dict_repr`` with :meth:`skip_none` so that dictionary
    never carries empty fields — which is what makes binding params
    optional.
    """

    def __new__(mcs, name, bases, dct):
        klass = super().__new__(mcs, name, bases, dct)
        setattr(klass, '__init__',
                mcs.add_to_dict(getattr(klass, '__init__')))
        setattr(klass, 'get_dict_repr',
                mcs.skip_none(getattr(klass, 'get_dict_repr')))
        return klass

    @staticmethod
    def skip_none(func):
        """Wrap *func* so None values are stripped from its result."""
        def wrapper(*args, **kw):
            return BuildDictMeta.clean_nones(func(*args, **kw))

        return wrapper

    @staticmethod
    def add_to_dict(func: Callable[..., Any]):
        """Wrap an ``__init__`` so it records its parameter names (plus
        any extra kwargs) on the instance as ``init_params``."""
        def wrapper(*args, **kwargs):
            if not args:
                raise ValueError(
                    f'{func.__name__} has no args. Please ensure func is an '
                    f'object method.')

            func(*args, **kwargs)

            instance = args[0]

            params = list(inspect.signature(func).parameters.keys())
            params.extend(kwargs.keys())
            # Attach unknown kwargs as attributes so they show up in the
            # generated dict repr.
            for key, value in kwargs.items():
                if not hasattr(instance, key):
                    setattr(instance, key, value)

            setattr(instance, 'init_params', params)

        return wrapper

    @staticmethod
    def clean_nones(value):
        """Recursively drop None entries from dicts and lists and return
        the result as a new dict or list."""
        if isinstance(value, list):
            return [BuildDictMeta.clean_nones(item)
                    for item in value
                    if item is not None]
        if isinstance(value, dict):
            return {
                key: BuildDictMeta.clean_nones(val)
                for key, val in value.items()
                if val is not None
            }
        return value


class ABCBuildDictMeta(ABCMeta, BuildDictMeta):
    """Combined metaclass for bindings that are also ABCs."""
    pass


def parse_singular_param_to_enum(param: Optional[Union[T, str]],
                                 class_name: Type[T]) -> Optional[T]:
    """Convert a string (case-insensitive) to a member of *class_name*;
    pass through None and existing members unchanged."""
    if param is None:
        return None
    if not isinstance(param, str):
        return param
    try:
        return class_name[param.upper()]
    except KeyError:
        raise KeyError(
            f"Can not parse str '{param}' to {class_name.__name__}. "
            f"Allowed values are {[e.name for e in class_name]}")


def parse_iterable_param_to_enums(
        param_values: Optional[Union[Iterable[str], Iterable[T]]],
        class_name: Type[T]) -> Optional[Iterable[T]]:
    """Convert each string in *param_values* to a member of *class_name*;
    existing members pass through. None maps to None."""
    if param_values is None:
        return None

    try:
        return [class_name[value.upper()] if isinstance(value, str) else value
                for value in param_values]
    except KeyError:
        raise KeyError(
            f"Can not parse '{param_values}' to "
            f"Optional[Iterable[{class_name.__name__}]]. "
            f"Please ensure param all list elements exist in "
            f"{[e.name for e in class_name]}")


def to_camel_case(snake_case_str: str):
    """Convert a snake_case (or single-word) string to camelCase.

    Raises ValueError for empty input or input that is neither a word
    nor snake case.
    """
    if not snake_case_str:
        raise ValueError(
            f"Please ensure arg name {snake_case_str} is not empty!")

    if not is_snake_case(snake_case_str) and not is_word(snake_case_str):
        raise ValueError(
            f"Please ensure {snake_case_str} is a word or snake case "
            f"string with underscore as separator.")
    head, *tail = snake_case_str.split('_')
    return head + ''.join(word.title() for word in tail)


def is_snake_case(input_string: str) -> bool:
    """Return True when *input_string* is snake case: letters/digits
    only, at least one underscore, not starting with a digit.

    >>> is_snake_case('foo_bar_baz')  # True
    >>> is_snake_case('foo')  # False
    """
    return SNAKE_CASE_RE.match(input_string) is not None


def is_word(input_string: str) -> bool:
    """Return True when *input_string* is a single word: letters/digits
    only, not starting with a digit, no separators.

    >>> is_word('1foo')  # False
    >>> is_word('foo_')  # False
    >>> is_word('foo')  # True
    """
    return WORD_RE.match(input_string) is not None
166 | """ 167 | return WORD_RE.match(input_string) is not None 168 | 169 | 170 | class StringifyEnumJsonEncoder(JSONEncoder): 171 | def default(self, o): 172 | if isinstance(o, StringifyEnum): 173 | return str(o) 174 | 175 | return super().default(o) 176 | -------------------------------------------------------------------------------- /azure/functions/extension/func_extension_base.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import abc 5 | import os 6 | import typing 7 | from logging import Logger 8 | from .extension_meta import ExtensionMeta 9 | from .extension_scope import ExtensionScope 10 | from .function_extension_exception import FunctionExtensionException 11 | from .._abc import Context 12 | 13 | 14 | class FuncExtensionBase(metaclass=ExtensionMeta): 15 | """An abstract class defines the life-cycle hooks which to be implemented 16 | by customer's extension. 17 | 18 | Everytime when a new extension is initialized in customer function scripts, 19 | the ExtensionManager._func_exts field records the extension to this 20 | specific function name. 21 | """ 22 | 23 | _scope = ExtensionScope.FUNCTION 24 | 25 | @abc.abstractmethod 26 | def __init__(self, file_path: str): 27 | """Constructor for extension. This needs to be implemented and ensure 28 | super().__init__(file_path) is called. 29 | 30 | The initializer serializes the extension to a tree. This speeds 31 | up the worker lookup and reduce the overhead on each invocation. 32 | _func_exts[]..(ext_name, ext_impl) 33 | (e.g. _func_exts['HttpTrigger'].pre_invocation.ext_impl) 34 | 35 | Parameters 36 | ---------- 37 | file_path: str 38 | The name of trigger the extension attaches to (e.g. __file__). 
39 | """ 40 | script_root = os.getenv('AzureWebJobsScriptRoot') 41 | if script_root is None: 42 | raise FunctionExtensionException( 43 | 'AzureWebJobsScriptRoot environment variable is not defined. ' 44 | 'Please ensure the extension is running in Azure Functions.' 45 | ) 46 | 47 | # Split will always return ('') in if no folder exist in the path 48 | relpath_to_project_root = os.path.relpath( 49 | os.path.normpath(file_path), 50 | os.path.normpath(script_root) 51 | ) 52 | 53 | trigger_name = (relpath_to_project_root.split(os.sep) or [''])[0] 54 | if not trigger_name or trigger_name.startswith(('.', '..')): 55 | raise FunctionExtensionException( 56 | 'Failed to parse trigger name from filename. ' 57 | 'Function extension should bind to a trigger script, ' 58 | 'not share folder. Please ensure extension is create inside a' 59 | 'trigger while __file__ is passed into the argument. ' 60 | 'The trigger name is resolved from os.path.relpath(file_path,' 61 | 'project_root).' 62 | ) 63 | 64 | # This is used in ExtensionMeta._register_function_extension 65 | self._trigger_name = trigger_name 66 | 67 | # DO NOT decorate this with @abc.abstractmethod 68 | # since implementation by subclass is not mandatory 69 | def post_function_load(self, 70 | function_name: str, 71 | function_directory: str, 72 | *args, **kwargs) -> None: 73 | """This hook will be called right after a customer's function loaded. 74 | In this stage, the customer's logger is not fully initialized, so it 75 | is not provided. Please use print() to emit message if necessary. 76 | 77 | Parameters 78 | ---------- 79 | function_name: str 80 | The name of customer's function (e.g. HttpTrigger) 81 | function_directory: str 82 | The path to customer's function directory 83 | (e.g. 
/home/site/wwwroot/HttpTrigger) 84 | """ 85 | pass 86 | 87 | # DO NOT decorate this with @abc.abstractmethod 88 | # since implementation by subclass is not mandatory 89 | def pre_invocation( 90 | self, 91 | logger: Logger, 92 | context: Context, 93 | func_args: typing.Optional[typing.Dict[str, object]] = None, 94 | *args, 95 | **kwargs) -> None: 96 | """This hook will be called right before customer's function 97 | is being executed. 98 | 99 | Parameters 100 | ---------- 101 | logger: logging.Logger 102 | A logger provided by Python worker. Extension developer should 103 | use this logger to emit telemetry to Azure Functions customers. 104 | context: azure.functions.Context 105 | This will include the function_name, function_directory and an 106 | invocation_id of this specific invocation. 107 | func_args: typing.Dict[str, object] 108 | Arguments that are passed into the Azure Functions. The name of 109 | each parameter is defined in function.json. Extension developers 110 | may also want to do isinstance() check if you want to apply 111 | operations to specific trigger types or input binding types. 112 | """ 113 | pass 114 | 115 | # DO NOT decorate this with @abc.abstractmethod 116 | # since implementation by subclass is not mandatory 117 | def post_invocation( 118 | self, 119 | logger: Logger, 120 | context: Context, 121 | func_args: typing.Optional[typing.Dict[str, object]] = None, 122 | func_ret: typing.Optional[object] = None, 123 | *args, 124 | **kwargs) -> None: 125 | """This hook will be called right after a customer's function 126 | is executed. 127 | 128 | Parameters 129 | ---------- 130 | logger: logging.Logger 131 | A logger provided by Python worker. Extension developer should 132 | use this logger to emit telemetry to Azure Functions customers. 133 | context: azure.functions.Context 134 | This will include the function_name, function_directory and an 135 | invocation_id of this specific invocation. 
136 | func_args: typing.Dict[str, object] 137 | Arguments that are passed into the Azure Functions. The name of 138 | each parameter is defined in function.json. Extension developers 139 | may also want to do isinstance() check if you want to apply 140 | operations to specific trigger types or input binding types. 141 | func_ret: typing.Optional[object] 142 | Return value from Azure Functions. This is usually the value 143 | defined in function.json $return section. Extension developers 144 | may also want to do isinstance() check if you want to apply 145 | operations to specific types or input binding types. 146 | """ 147 | pass 148 | -------------------------------------------------------------------------------- /tests/test_eventgrid.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | from datetime import datetime 5 | import unittest 6 | from typing import List 7 | 8 | import azure.functions as func 9 | import azure.functions.eventgrid as azf_event_grid 10 | 11 | 12 | class MyTestCase(unittest.TestCase): 13 | def test_eventgrid_input_type(self): 14 | check_input_type = azf_event_grid.EventGridEventInConverter.\ 15 | check_input_type_annotation 16 | self.assertTrue(check_input_type(func.EventGridEvent)) 17 | self.assertFalse(check_input_type(List[func.EventGridEvent])) 18 | self.assertFalse(check_input_type(str)) 19 | self.assertFalse(check_input_type(bytes)) 20 | 21 | def test_eventgrid_output_type(self): 22 | check_output_type = azf_event_grid.EventGridEventOutConverter.\ 23 | check_output_type_annotation 24 | self.assertTrue(check_output_type(func.EventGridOutputEvent)) 25 | self.assertTrue(check_output_type(List[func.EventGridOutputEvent])) 26 | self.assertTrue(check_output_type(str)) 27 | self.assertTrue(check_output_type(bytes)) 28 | self.assertTrue(check_output_type(List[str])) 29 | 30 | def 
test_eventgrid_decode(self): 31 | eventGridEvent = azf_event_grid.EventGridEventInConverter.decode( 32 | data=self._generate_single_eventgrid_datum(), trigger_metadata=None 33 | ) 34 | self.assertEqual( 35 | eventGridEvent.id, 36 | "00010001-0001-0001-0001-000100010001") 37 | self.assertEqual(eventGridEvent.subject, "eventhubs/test") 38 | self.assertEqual(eventGridEvent.event_type, "captureFileCreated") 39 | self.assertEqual(eventGridEvent.topic, "/TestTopic/namespaces/test") 40 | self.assertIsNotNone(eventGridEvent.get_json()) 41 | 42 | def test_eventgrid_decode_with_null_data(self): 43 | eventGridEvent = azf_event_grid.EventGridEventInConverter.decode( 44 | data=self._generate_single_eventgrid_datum( 45 | with_data=False), trigger_metadata=None) 46 | self.assertEqual( 47 | eventGridEvent.id, 48 | "00010001-0001-0001-0001-000100010001") 49 | self.assertEqual(eventGridEvent.subject, "eventhubs/test") 50 | self.assertEqual(eventGridEvent.event_type, "captureFileCreated") 51 | self.assertEqual(eventGridEvent.topic, "/TestTopic/namespaces/test") 52 | self.assertIsNone(eventGridEvent.get_json()) 53 | 54 | def test_eventgrid_encode_with_str_data(self): 55 | example_data = self._generate_single_eventgrid_str() 56 | eventGridDatum = azf_event_grid.EventGridEventOutConverter.encode( 57 | example_data, expected_type=type(example_data)) 58 | self.assertEqual(eventGridDatum.type, "string") 59 | 60 | def test_eventgrid_encode_with_bytes_data(self): 61 | example_data = self._generate_single_eventgrid_str(True) 62 | eventGridDatum = azf_event_grid.EventGridEventOutConverter.encode( 63 | example_data, expected_type=type(example_data)) 64 | self.assertEqual(eventGridDatum.type, "bytes") 65 | 66 | def test_eventgrid_encode_with_EventGridData(self): 67 | example_data = self._generate_single_eventgrid_event() 68 | event_grid_datum = azf_event_grid.EventGridEventOutConverter.encode( 69 | example_data, expected_type=type(example_data)) 70 | 71 | self.assertEqual(event_grid_datum.type, 
"json") 72 | 73 | def test_eventgrid_encode_with_multiple_EventGridData(self): 74 | example_data = self._generate_multiple_eventgrid_event() 75 | event_grid_datum = azf_event_grid.EventGridEventOutConverter.encode( 76 | example_data, expected_type=type(example_data)) 77 | 78 | self.assertEqual(event_grid_datum.type, "json") 79 | 80 | @staticmethod 81 | def _generate_single_eventgrid_datum(with_data=True, datum_type='json'): 82 | datum_with_data = """ 83 | { 84 | "topic": "/TestTopic/namespaces/test", 85 | "subject": "eventhubs/test", 86 | "eventType": "captureFileCreated", 87 | "eventTime": "2017-07-14T23:10:27.7689666Z", 88 | "id": "00010001-0001-0001-0001-000100010001", 89 | "data": { 90 | "fileUrl": "https://test.blob.core.windows.net/debugging/testblob.txt", 91 | "fileType": "AzureBlockBlob", 92 | "partitionId": "1", 93 | "sizeInBytes": 0, 94 | "eventCount": 0, 95 | "firstSequenceNumber": -1, 96 | "lastSequenceNumber": -1, 97 | "firstEnqueueTime": "0001-01-01T00:00:00", 98 | "lastEnqueueTime": "0001-01-01T00:00:00" 99 | }, 100 | "dataVersion": "", 101 | "metadataVersion": "1" 102 | } 103 | """ 104 | datum_without_data = """ 105 | { 106 | "topic": "/TestTopic/namespaces/test", 107 | "subject": "eventhubs/test", 108 | "eventType": "captureFileCreated", 109 | "eventTime": "2017-07-14T23:10:27.7689666Z", 110 | "id": "00010001-0001-0001-0001-000100010001", 111 | "dataVersion": "", 112 | "metadataVersion": "1" 113 | }""" 114 | 115 | datum = datum_with_data if with_data else datum_without_data 116 | 117 | if datum_type == 'bytes': 118 | datum = datum.encode('utf-8') 119 | 120 | return func.meta.Datum(datum, datum_type) 121 | 122 | @staticmethod 123 | def _generate_single_eventgrid_event(with_date=True): 124 | return azf_event_grid.azf_eventgrid.EventGridOutputEvent( 125 | id="id", 126 | subject='subject', 127 | event_type='eventType', 128 | event_time=datetime.utcnow(), 129 | data={"tag1": "value1", "tag2": "value2"} if with_date else {}, 130 | 
data_version='dataVersion', 131 | ) 132 | 133 | @staticmethod 134 | def _generate_multiple_eventgrid_event(with_date=True): 135 | return [azf_event_grid.azf_eventgrid.EventGridOutputEvent( 136 | id="id1", 137 | subject='subject1', 138 | event_type='eventType1', 139 | event_time=datetime.utcnow(), 140 | data={"tag1": "value1", "tag2": "value2"} if with_date else {}, 141 | data_version='dataVersion', 142 | ), azf_event_grid.azf_eventgrid.EventGridOutputEvent( 143 | id="id2", 144 | subject='subject2', 145 | event_type='eventType2', 146 | event_time=datetime.utcnow(), 147 | data={"tag1": "value1", "tag2": "value2"} if with_date else {}, 148 | data_version='dataVersion', 149 | )] 150 | 151 | @staticmethod 152 | def _generate_single_eventgrid_str(in_bytes=False): 153 | string_representation = '{"id": "id", ' \ 154 | '"subject": "subject", ' \ 155 | '"dataVersion": "dataVersion", ' \ 156 | '"eventType": "eventType", ' \ 157 | '"data": {"tag1": "value1", ' \ 158 | '"tag2": "value2"}, ' \ 159 | '"eventTime": "2020-04-22T18:19:19Z"}' 160 | return string_representation.encode('utf-8') \ 161 | if in_bytes \ 162 | else string_representation 163 | 164 | 165 | if __name__ == '__main__': 166 | unittest.main() 167 | -------------------------------------------------------------------------------- /azure/functions/durable_functions.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Microsoft Corporation. All rights reserved. 2 | # Licensed under the MIT License. 3 | 4 | import typing 5 | import json 6 | 7 | from azure.functions import _durable_functions 8 | from . 
# Durable Function Orchestration Trigger
class OrchestrationTriggerConverter(meta.InConverter,
                                    meta.OutConverter,
                                    binding='orchestrationTrigger',
                                    trigger=True):
    """Binding converter for 'orchestrationTrigger'.

    Wraps the incoming datum value in an OrchestrationContext and passes
    the orchestrator's return value through as a JSON datum.
    """

    @classmethod
    def check_input_type_annotation(cls, pytype):
        return issubclass(pytype, _durable_functions.OrchestrationContext)

    @classmethod
    def check_output_type_annotation(cls, pytype):
        # Implicit output should accept any return type
        return True

    @classmethod
    def decode(cls,
               data: meta.Datum, *,
               trigger_metadata) -> _durable_functions.OrchestrationContext:
        return _durable_functions.OrchestrationContext(data.value)

    @classmethod
    def encode(cls, obj: typing.Any, *,
               expected_type: typing.Optional[type]) -> meta.Datum:
        # Durable function context should be a json
        return meta.Datum(type='json', value=obj)

    @classmethod
    def has_implicit_output(cls) -> bool:
        return True


# Durable Function Entity Trigger
# NOTE(review): the class name misspells "Entity"; it is kept as-is because
# renaming would change this module's public surface.
class EnitityTriggerConverter(meta.InConverter,
                              meta.OutConverter,
                              binding='entityTrigger',
                              trigger=True):
    """Binding converter for 'entityTrigger' (mirrors the orchestration one)."""

    @classmethod
    def check_input_type_annotation(cls, pytype):
        return issubclass(pytype, _durable_functions.EntityContext)

    @classmethod
    def check_output_type_annotation(cls, pytype):
        # Implicit output should accept any return type
        return True

    @classmethod
    def decode(cls,
               data: meta.Datum, *,
               trigger_metadata) -> _durable_functions.EntityContext:
        return _durable_functions.EntityContext(data.value)

    @classmethod
    def encode(cls, obj: typing.Any, *,
               expected_type: typing.Optional[type]) -> meta.Datum:
        # Durable function context should be a json
        return meta.Datum(type='json', value=obj)

    @classmethod
    def has_implicit_output(cls) -> bool:
        return True


# Durable Function Activity Trigger
class ActivityTriggerConverter(meta.InConverter,
                               meta.OutConverter,
                               binding='activityTrigger',
                               trigger=True):
    """Binding converter for 'activityTrigger'.

    Inputs arrive from the durable extension as JSON text and are decoded
    with the custom-object deserializer hook; outputs must be JSON
    serializable (optionally via the custom-object serializer hook).
    """

    @classmethod
    def check_input_type_annotation(cls, pytype):
        # Activity Trigger's arguments should accept any types
        return True

    @classmethod
    def check_output_type_annotation(cls, pytype):
        # The activity trigger should accept any JSON serializable types
        return True

    @classmethod
    def decode(cls,
               data: meta.Datum, *,
               trigger_metadata) -> typing.Any:
        data_type = data.type

        # Durable functions extension always returns a string of json
        # See durable functions library's call_activity_task docs
        if data_type in ['string', 'json']:
            try:
                callback = _durable_functions._deserialize_custom_object
                result = json.loads(data.value, object_hook=callback)
            except json.JSONDecodeError:
                # String failover if the content is not json serializable
                result = data.value
            except Exception as e:
                raise ValueError(
                    'activity trigger input must be a string or a '
                    f'valid json serializable ({data.value})') from e
        else:
            raise NotImplementedError(
                f'unsupported activity trigger payload type: {data_type}')

        return result

    @classmethod
    def encode(cls, obj: typing.Any, *,
               expected_type: typing.Optional[type]) -> meta.Datum:
        try:
            callback = _durable_functions._serialize_custom_object
            result = json.dumps(obj, default=callback)
        except TypeError as e:
            raise ValueError(
                f'activity trigger output must be json serializable ({obj})') from e

        return meta.Datum(type='json', value=result)

    @classmethod
    def has_implicit_output(cls) -> bool:
        return True


# Durable Functions Durable Client Bindings
class DurableClientConverter(meta.InConverter,
                             meta.OutConverter,
                             binding='durableClient'):
    """Binding converter for the 'durableClient' input binding.

    Maps basic Python values to and from datums; unlike the trigger
    converters above it produces no implicit output.
    """

    @classmethod
    def has_implicit_output(cls) -> bool:
        return False

    @classmethod
    def has_trigger_support(cls) -> bool:
        return False

    @classmethod
    def check_input_type_annotation(cls, pytype: type) -> bool:
        return issubclass(pytype, (str, bytes))

    @classmethod
    def check_output_type_annotation(cls, pytype: type) -> bool:
        return issubclass(pytype, (str, bytes, bytearray))

    @classmethod
    def encode(cls, obj: typing.Any, *,
               expected_type: typing.Optional[type]) -> meta.Datum:
        # NOTE: bool is tested before int below, since bool is a subclass
        # of int and would otherwise be encoded as an 'int' datum.
        if isinstance(obj, str):
            return meta.Datum(type='string', value=obj)
        elif isinstance(obj, (bytes, bytearray)):
            return meta.Datum(type='bytes', value=bytes(obj))
        elif obj is None:
            return meta.Datum(type=None, value=obj)
        elif isinstance(obj, dict):
            return meta.Datum(type='dict', value=obj)
        elif isinstance(obj, list):
            return meta.Datum(type='list', value=obj)
        elif isinstance(obj, bool):
            return meta.Datum(type='bool', value=obj)
        elif isinstance(obj, int):
            return meta.Datum(type='int', value=obj)
        elif isinstance(obj, float):
            return meta.Datum(type='double', value=obj)
        else:
            raise NotImplementedError

    @classmethod
    def decode(cls, data: meta.Datum, *, trigger_metadata) -> typing.Any:
        if data is None:
            return None
        data_type = data.type

        if data_type in ('string', 'bytes', 'json'):
            result = data.value
        elif data_type is None:
            result = None
        else:
            # Fixed: the message previously named the "generic" binding
            # (copy-paste from the generic converter) and passed two
            # positional args to ValueError instead of one message.
            raise ValueError(
                'unexpected type of data received for the "durableClient" '
                f'binding: {data_type!r}')

        return result