├── .eslintrc.json ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE.md └── policies │ └── resourceManagement.yml ├── .gitignore ├── .prettierignore ├── .prettierrc ├── .vscode ├── extensions.json ├── launch.json ├── settings.json └── tasks.json ├── LICENSE ├── NOTICE.html ├── README.md ├── SECURITY.md ├── Worker.nuspec ├── azure-functions-language-worker-protobuf ├── .gitignore ├── CODEOWNERS ├── LICENSE ├── README.md ├── SECURITY.md └── src │ └── proto │ ├── FunctionRpc.proto │ ├── identity │ └── ClaimsIdentityRpc.proto │ └── shared │ └── NullableTypes.proto ├── azure-pipelines ├── code-mirror.yml ├── official-build.yml ├── public-build.yml ├── templates │ ├── build.yml │ ├── test.yml │ └── worker.yml └── worker-build.yml ├── package-lock.json ├── package.json ├── scripts ├── generateProtos.js └── updateVersion.ts ├── src ├── AppContext.ts ├── Disposable.ts ├── GrpcClient.ts ├── LegacyFunctionLoader.ts ├── Worker.ts ├── WorkerContext.ts ├── constants.ts ├── coreApi │ ├── converters │ │ ├── ensureKeysMatch.ts │ │ ├── fromCoreFunctionMetadata.ts │ │ ├── fromCoreInvocationResponse.ts │ │ ├── fromCoreStatusResult.ts │ │ ├── fromCoreTypedData.ts │ │ ├── handleDefaultEnumCase.ts │ │ ├── toCoreFunctionMetadata.ts │ │ ├── toCoreInvocationRequest.ts │ │ ├── toCoreStatusResult.ts │ │ └── toCoreTypedData.ts │ ├── coreApiLog.ts │ ├── registerFunction.ts │ └── setProgrammingModel.ts ├── errors.ts ├── eventHandlers │ ├── EventHandler.ts │ ├── FunctionEnvironmentReloadHandler.ts │ ├── FunctionLoadHandler.ts │ ├── FunctionsMetadataHandler.ts │ ├── InvocationHandler.ts │ ├── WorkerInitHandler.ts │ ├── getWorkerCapabilities.ts │ ├── getWorkerMetadata.ts │ └── terminateWorker.ts ├── hooks │ ├── LogHookContext.ts │ ├── executeHooks.ts │ ├── getHooks.ts │ └── registerHook.ts ├── loadScriptFile.ts ├── nodejsWorker.ts ├── parsers │ └── parsePackageJson.ts ├── setupCoreModule.ts ├── setupEventStream.ts ├── startApp.ts └── utils │ ├── Logger.ts │ ├── blockedMonitor.ts │ ├── delay.ts │ ├── nonNull.ts │ └── util.ts ├── test ├── Worker.test.ts ├── blockMonitorTest.ts ├── errors.test.ts ├── eventHandlers │ ├── FunctionEnvironmentReloadHandler.test.ts │ ├── FunctionLoadHandler.test.ts │ ├── InvocationHandler.test.ts │ ├── TestEventStream.ts │ ├── WorkerInitHandler.test.ts │ ├── WorkerStatusHandler.test.ts │ ├── beforeEventHandlerSuite.ts │ ├── msg.ts │ ├── terminateWorker.test.ts │ ├── testApp │ │ ├── .gitignore │ │ └── src │ │ │ ├── doNothing.cjs │ │ │ ├── doNothing.js │ │ │ ├── doNothing.mjs │ │ │ ├── doNothing2.js │ │ │ ├── helloWorld.js │ │ │ ├── longLoad.js │ │ │ ├── moduleNotAFunction.js │ │ │ ├── moduleWithThis.js │ │ │ ├── registerAppStartHook.js │ │ │ ├── registerFunction.js │ │ │ └── throwError.js │ └── testAppUtils.ts ├── loadScriptFile.test.ts ├── mochaReporterOptions.json ├── parsers │ └── parsePackageJson.test.ts └── startApp.test.ts ├── tsconfig.json ├── types-core └── index.d.ts ├── webpack.config.js └── worker.config.json /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "@typescript-eslint/parser", 3 | "plugins": ["@typescript-eslint", "header", "deprecation", "simple-import-sort", "import"], 4 | "parserOptions": { 5 | "project": "tsconfig.json", 6 | "sourceType": "module" 7 | }, 8 | "extends": [ 9 | "eslint:recommended", 10 | "plugin:@typescript-eslint/recommended", 11 | "plugin:@typescript-eslint/recommended-requiring-type-checking", 12 | "plugin:prettier/recommended" 13 | ], 14 | "rules": { 15 | "header/header": [ 16 | 2, 17 | 
"line", 18 | [" Copyright (c) .NET Foundation. All rights reserved.", " Licensed under the MIT License."], 19 | 2 20 | ], 21 | "deprecation/deprecation": "error", 22 | "@typescript-eslint/ban-types": "off", 23 | "@typescript-eslint/explicit-module-boundary-types": "off", 24 | "@typescript-eslint/no-empty-function": "off", 25 | "@typescript-eslint/no-explicit-any": "off", 26 | "@typescript-eslint/no-namespace": "off", 27 | "@typescript-eslint/no-non-null-assertion": "off", 28 | "@typescript-eslint/no-unsafe-argument": "off", 29 | "@typescript-eslint/no-unsafe-assignment": "off", 30 | "@typescript-eslint/no-unsafe-call": "off", 31 | "@typescript-eslint/no-unsafe-member-access": "off", 32 | "@typescript-eslint/no-unsafe-return": "off", 33 | "@typescript-eslint/no-unused-vars": ["error", { "argsIgnorePattern": "^_" }], 34 | "@typescript-eslint/require-await": "off", 35 | "@typescript-eslint/restrict-plus-operands": "off", 36 | "@typescript-eslint/restrict-template-expressions": "off", 37 | "@typescript-eslint/unbound-method": "off", 38 | "no-empty": "off", 39 | "prefer-const": ["error", { "destructuring": "all" }], 40 | "prefer-rest-params": "off", 41 | "prefer-spread": "off", 42 | "@typescript-eslint/explicit-member-accessibility": [ 43 | "error", 44 | { 45 | "accessibility": "no-public" 46 | } 47 | ], 48 | "simple-import-sort/imports": [ 49 | "error", 50 | { 51 | "groups": [["^\\u0000", "^node:", "^@?\\w", "^", "^\\."]] 52 | } 53 | ], 54 | "simple-import-sort/exports": "error", 55 | "import/first": "error", 56 | "import/newline-after-import": "error", 57 | "import/no-duplicates": "error" 58 | }, 59 | "ignorePatterns": ["**/*.js", "**/*.mjs", "**/*.cjs", "dist", "azure-functions-language-worker-protobuf"] 60 | } 61 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @azure/azure-functions-nodejs -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | #### Investigative information 7 | 8 | Please provide the following: 9 | 10 | - Timestamp: 11 | - Function App name: 12 | - Function name(s) (as appropriate): 13 | - Invocation ID: 14 | - Region: 15 | 16 | 19 | 20 | #### Repro steps 21 | 22 | Provide the steps required to reproduce the problem: 23 | 24 | 30 | 31 | #### Expected behavior 32 | 33 | Provide a description of the expected behavior. 34 | 35 | 40 | 41 | #### Actual behavior 42 | 43 | Provide a description of the actual behavior observed. 44 | 45 | 50 | 51 | #### Known workarounds 52 | 53 | Provide a description of any known workarounds. 
54 | 55 | 60 | 61 | #### Related information 62 | 63 | Provide any related information 64 | 65 | * Programming language used 66 | * Links to source 67 | * Bindings used 68 | 95 | -------------------------------------------------------------------------------- /.github/policies/resourceManagement.yml: -------------------------------------------------------------------------------- 1 | id: 2 | name: GitOps.PullRequestIssueManagement 3 | description: GitOps.PullRequestIssueManagement primitive 4 | owner: 5 | resource: repository 6 | disabled: false 7 | where: 8 | configuration: 9 | resourceManagementConfiguration: 10 | scheduledSearches: 11 | - description: 12 | frequencies: 13 | - hourly: 14 | hour: 3 15 | filters: 16 | - isIssue 17 | - isOpen 18 | - hasLabel: 19 | label: 'Needs: Author Feedback' 20 | - hasLabel: 21 | label: no recent activity 22 | - noActivitySince: 23 | days: 3 24 | actions: 25 | - closeIssue 26 | - description: 27 | frequencies: 28 | - hourly: 29 | hour: 3 30 | filters: 31 | - isIssue 32 | - isOpen 33 | - hasLabel: 34 | label: 'Needs: Author Feedback' 35 | - noActivitySince: 36 | days: 4 37 | - isNotLabeledWith: 38 | label: no recent activity 39 | actions: 40 | - addLabel: 41 | label: no recent activity 42 | - addReply: 43 | reply: This issue has been automatically marked as stale because it has been marked as requiring author feedback but has not had any activity for **4 days**. It will be closed if no further activity occurs **within 3 days of this comment**. 44 | - description: 45 | frequencies: 46 | - hourly: 47 | hour: 3 48 | filters: 49 | - isIssue 50 | - isOpen 51 | - hasLabel: 52 | label: duplicate 53 | - noActivitySince: 54 | days: 3 55 | actions: 56 | - addReply: 57 | reply: This issue has been marked as duplicate and has not had any activity for **3 days**. It will be closed for housekeeping purposes. 58 | - closeIssue 59 | - description: 60 | frequencies: 61 | - hourly: 62 | hour: 3 63 | filters: 64 | - isPullRequest 65 | - isOpen 66 | - hasLabel: 67 | label: 'Needs: Author Feedback' 68 | - hasLabel: 69 | label: no recent activity 70 | - noActivitySince: 71 | days: 7 72 | actions: 73 | - closeIssue 74 | - description: 75 | frequencies: 76 | - hourly: 77 | hour: 3 78 | filters: 79 | - isPullRequest 80 | - isOpen 81 | - hasLabel: 82 | label: 'Needs: Author Feedback' 83 | - noActivitySince: 84 | days: 7 85 | - isNotLabeledWith: 86 | label: no recent activity 87 | actions: 88 | - addLabel: 89 | label: no recent activity 90 | - addReply: 91 | reply: This pull request has been automatically marked as stale because it has been marked as requiring author feedback but has not had any activity for **7 days**. It will be closed if no further activity occurs **within 7 days of this comment**. 
92 | eventResponderTasks: 93 | - if: 94 | - payloadType: Issue_Comment 95 | - isAction: 96 | action: Created 97 | - isActivitySender: 98 | issueAuthor: True 99 | - hasLabel: 100 | label: 'Needs: Author Feedback' 101 | then: 102 | - addLabel: 103 | label: 'Needs: Attention :wave:' 104 | - removeLabel: 105 | label: 'Needs: Author Feedback' 106 | description: 107 | - if: 108 | - payloadType: Issues 109 | - not: 110 | isAction: 111 | action: Closed 112 | - hasLabel: 113 | label: no recent activity 114 | then: 115 | - removeLabel: 116 | label: no recent activity 117 | description: 118 | - if: 119 | - payloadType: Issues 120 | - isAction: 121 | action: Closed 122 | - hasLabel: 123 | label: 'Needs: Triage (Functions)' 124 | then: 125 | - removeLabel: 126 | label: 'Needs: Triage (Functions)' 127 | description: 128 | - if: 129 | - payloadType: Issue_Comment 130 | - hasLabel: 131 | label: no recent activity 132 | then: 133 | - removeLabel: 134 | label: no recent activity 135 | description: 136 | - if: 137 | - payloadType: Pull_Request_Review 138 | - isAction: 139 | action: Submitted 140 | - isReviewState: 141 | reviewState: Changes_requested 142 | then: 143 | - addLabel: 144 | label: 'Needs: Author Feedback' 145 | description: 146 | - if: 147 | - payloadType: Pull_Request 148 | - isActivitySender: 149 | issueAuthor: True 150 | - not: 151 | isAction: 152 | action: Closed 153 | - hasLabel: 154 | label: 'Needs: Author Feedback' 155 | then: 156 | - removeLabel: 157 | label: 'Needs: Author Feedback' 158 | description: 159 | - if: 160 | - payloadType: Issue_Comment 161 | - isActivitySender: 162 | issueAuthor: True 163 | - hasLabel: 164 | label: 'Needs: Author Feedback' 165 | then: 166 | - removeLabel: 167 | label: 'Needs: Author Feedback' 168 | description: 169 | - if: 170 | - payloadType: Pull_Request_Review 171 | - isActivitySender: 172 | issueAuthor: True 173 | - hasLabel: 174 | label: 'Needs: Author Feedback' 175 | then: 176 | - removeLabel: 177 | label: 'Needs: Author Feedback' 178 | description: 179 | - if: 180 | - payloadType: Pull_Request 181 | - not: 182 | isAction: 183 | action: Closed 184 | - hasLabel: 185 | label: no recent activity 186 | then: 187 | - removeLabel: 188 | label: no recent activity 189 | description: 190 | - if: 191 | - payloadType: Issue_Comment 192 | - hasLabel: 193 | label: no recent activity 194 | then: 195 | - removeLabel: 196 | label: no recent activity 197 | description: 198 | - if: 199 | - payloadType: Pull_Request_Review 200 | - hasLabel: 201 | label: no recent activity 202 | then: 203 | - removeLabel: 204 | label: no recent activity 205 | description: 206 | - if: 207 | - payloadType: Pull_Request 208 | - hasLabel: 209 | label: auto merge 210 | then: 211 | - enableAutoMerge: 212 | mergeMethod: Squash 213 | description: 214 | - if: 215 | - payloadType: Pull_Request 216 | - labelRemoved: 217 | label: auto merge 218 | then: 219 | - disableAutoMerge 220 | description: 221 | onFailure: 222 | onSuccess: 223 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | coverage 16 | 17 | # nyc test coverage 18 | .nyc_output 19 | 20 | # Grunt intermediate storage 
(http://gruntjs.com/creating-plugins#storing-task-files) 21 | .grunt 22 | 23 | # node-waf configuration 24 | .lock-wscript 25 | 26 | # Compiled binary addons (http://nodejs.org/api/addons.html) 27 | build/Release 28 | 29 | # Dependency directories 30 | node_modules 31 | jspm_packages 32 | 33 | # Optional npm cache directory 34 | .npm 35 | 36 | # Optional REPL history 37 | .node_repl_history 38 | azure-functions-language-worker-protobuf/* 39 | 40 | dist 41 | pkg 42 | types/*.tgz 43 | 44 | **/*-test-results.xml 45 | testResults 46 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | out 2 | dist 3 | node_modules 4 | azure-functions-language-worker-protobuf 5 | NOTICE.html 6 | 7 | # Exclude markdown until this bug is fixed: https://github.com/prettier/prettier/issues/5019 8 | *.md -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "tabWidth": 4, 3 | "singleQuote": true, 4 | "printWidth": 120, 5 | "endOfLine": "auto" 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"] 3 | } 4 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "type": "node", 6 | "request": "attach", 7 | "name": "Attach by Process ID", 8 | "processId": "${command:PickProcess}" 9 | }, 10 | { 11 | "name": "Launch Unit Tests", 12 | "runtimeExecutable": "npm", 13 | "runtimeArgs": ["test"], 14 | "request": "launch", 15 | "skipFiles": ["/**"], 16 | "type": "pwa-node" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "azureFunctions.showProjectWarning": false, 3 | "editor.codeActionsOnSave": ["source.fixAll"], 4 | "editor.formatOnSave": true, 5 | "editor.defaultFormatter": "esbenp.prettier-vscode", 6 | "typescript.tsdk": "node_modules/typescript/lib", 7 | "typescript.preferences.importModuleSpecifier": "relative" 8 | } 9 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "npm: watch", 6 | "type": "npm", 7 | "script": "watch", 8 | "group": { 9 | "kind": "build", 10 | "isDefault": true 11 | }, 12 | "problemMatcher": "$tsc-watch", 13 | "isBackground": true, 14 | "presentation": { 15 | "reveal": "never" 16 | } 17 | }, 18 | { 19 | "type": "npm", 20 | "script": "lint", 21 | "problemMatcher": "$eslint-stylish", 22 | "label": "npm: lint" 23 | } 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) .NET Foundation. All rights reserved. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Azure Functions Node.js Worker 2 | 3 | |Branch|Status|[Runtime Version](https://docs.microsoft.com/azure/azure-functions/functions-versions)|Support level|Node.js Versions| 4 | |---|---|---|---|---| 5 | |v3.x (default)|[![Build Status](https://img.shields.io/azure-devops/build/azfunc/public/527/v3.x)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=527&branchName=v3.x) [![Test Status](https://img.shields.io/azure-devops/tests/azfunc/public/527/v3.x?compact_message)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=527&branchName=v3.x)|4|GA|20, 18| 6 | 7 | > NOTE: The branch corresponds to the _worker_ version, which is intentionally decoupled from the _runtime_ version. 8 | 9 | ## Getting Started 10 | 11 | - [Create your first Node.js function](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-node) 12 | - [Node.js developer guide](https://docs.microsoft.com/azure/azure-functions/functions-reference-node) 13 | - [Language Extensibility Wiki](https://github.com/Azure/azure-webjobs-sdk-script/wiki/Language-Extensibility) 14 | 15 | ## Contributing 16 | 17 | - Clone the repository locally and open in VS Code 18 | - Run "Extensions: Show Recommended Extensions" from the [command palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette) and install all extensions listed under "Workspace Recommendations" 19 | - Run `npm install` and `npm run build` 20 | - Create or open a local function app to test with 21 | - In the local function app, add the following settings to your "local.settings.json" file or configure them directly as environment variables 22 | - `languageWorkers__node__workerDirectory`: `` 23 | - `languageWorkers__node__arguments`: `--inspect` 24 | > 💡 Tip #1: Set `logging__logLevel__Worker` to `debug` if you want to view worker-specific logs in the output of `func start` 25 | 26 | > 💡 Tip #2: If you need to debug worker initialization, use `--inspect-brk` instead of `--inspect`. Just keep in mind you need to attach the debugger within 30 seconds or the host process will timeout. 27 | - Start the local function app (i.e. 
run `func start` or press F5) 28 | - Back in the worker repository, press F5 and select the process for your running function app 29 | - Before you submit a PR, run `npm run lint` and `npm test` and fix any issues. If you want to debug the tests, switch your [launch profile](https://code.visualstudio.com/docs/editor/debugging) in VS Code to "Launch Unit Tests" and press F5. 30 | 31 | ## Repositories 32 | 33 | These are the most important GitHub repositories that make up the Node.js experience on Azure Functions: 34 | 35 | - [azure-functions-nodejs-library](https://github.com/Azure/azure-functions-nodejs-library): The `@azure/functions` [npm package](https://www.npmjs.com/package/@azure/functions) that you include in your app. 36 | - [azure-functions-nodejs-worker](https://github.com/Azure/azure-functions-nodejs-worker): The other half of the Node.js experience that ships directly in Azure. 37 | - [azure-functions-nodejs-e2e-tests](https://github.com/Azure/azure-functions-nodejs-e2e-tests): A set of automated end-to-end tests designed to run against prerelease versions of all Node.js components. 38 | - [azure-functions-host](https://github.com/Azure/azure-functions-host): The runtime shared by all languages in Azure Functions. 39 | - [azure-functions-core-tools](https://github.com/Azure/azure-functions-core-tools): The CLI used to test Azure Functions locally. 40 | 41 | ### Code of Conduct 42 | 43 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 44 | 45 | ### Contributing to TypeScript type definitions 46 | 47 | The type definitions supplied by the `@azure/functions` [npm package](https://www.npmjs.com/package/@azure/functions) are located in the `types` folder. Any changes should be applied directly to `./types/index.d.ts`. Please make sure to update the tests in `./types/index.test.ts` as well. 48 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Security 4 | 5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 6 | 7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. 8 | 9 | ## Reporting Security Issues 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues.** 12 | 13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). 14 | 15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). 
If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). 16 | 17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). 18 | 19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 20 | 21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 22 | * Full paths of source file(s) related to the manifestation of the issue 23 | * The location of the affected source code (tag/branch/commit or direct URL) 24 | * Any special configuration required to reproduce the issue 25 | * Step-by-step instructions to reproduce the issue 26 | * Proof-of-concept or exploit code (if possible) 27 | * Impact of the issue, including how an attacker might exploit the issue 28 | 29 | This information will help us triage your report more quickly. 30 | 31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. 32 | 33 | ## Preferred Languages 34 | 35 | We prefer all communications to be in English. 36 | 37 | ## Policy 38 | 39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). 40 | 41 | 42 | -------------------------------------------------------------------------------- /Worker.nuspec: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Microsoft.Azure.Functions.NodeJsWorker 5 | 3.10.1 6 | Microsoft 7 | Microsoft 8 | false 9 | Microsoft Azure Functions NodeJs Worker 10 | © .NET Foundation. All rights reserved. 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /azure-functions-language-worker-protobuf/.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.suo 8 | *.user 9 | *.userosscache 10 | *.sln.docstates 11 | 12 | # User-specific files (MonoDevelop/Xamarin Studio) 13 | *.userprefs 14 | 15 | # Build results 16 | [Dd]ebug/ 17 | [Dd]ebugPublic/ 18 | [Rr]elease/ 19 | [Rr]eleases/ 20 | x64/ 21 | x86/ 22 | bld/ 23 | [Bb]in/ 24 | [Oo]bj/ 25 | [Ll]og/ 26 | 27 | # Visual Studio 2015 cache/options directory 28 | .vs/ 29 | # Uncomment if you have tasks that create the project's static files in wwwroot 30 | #wwwroot/ 31 | 32 | # MSTest test Results 33 | [Tt]est[Rr]esult*/ 34 | [Bb]uild[Ll]og.* 35 | 36 | # NUNIT 37 | *.VisualState.xml 38 | TestResult.xml 39 | 40 | # Build Results of an ATL Project 41 | [Dd]ebugPS/ 42 | [Rr]eleasePS/ 43 | dlldata.c 44 | 45 | # .NET Core 46 | project.lock.json 47 | project.fragment.lock.json 48 | artifacts/ 49 | **/Properties/launchSettings.json 50 | 51 | *_i.c 52 | *_p.c 53 | *_i.h 54 | *.ilk 55 | *.meta 56 | *.obj 57 | *.pch 58 | *.pdb 59 | *.pgc 60 | *.pgd 61 | *.rsp 62 | *.sbr 63 | *.tlb 64 | *.tli 65 | *.tlh 66 | *.tmp 67 | *.tmp_proj 68 | *.log 69 | *.vspscc 70 | *.vssscc 71 | .builds 72 | *.pidb 73 | *.svclog 74 | *.scc 75 | 76 | # Chutzpah Test files 77 | _Chutzpah* 78 | 79 | # Visual C++ cache files 80 | ipch/ 81 | *.aps 82 | *.ncb 83 | *.opendb 84 | *.opensdf 85 | *.sdf 86 | *.cachefile 87 | *.VC.db 88 | *.VC.VC.opendb 89 | 90 | # Visual Studio profiler 91 | *.psess 92 | *.vsp 93 | *.vspx 94 | *.sap 95 | 96 | # TFS 2012 Local Workspace 97 | $tf/ 98 | 99 | # Guidance Automation Toolkit 100 | *.gpState 101 | 102 | # ReSharper is a .NET coding add-in 103 | _ReSharper*/ 104 | *.[Rr]e[Ss]harper 105 | *.DotSettings.user 106 | 107 | # JustCode is a .NET coding add-in 108 | .JustCode 109 | 110 | # TeamCity is a build add-in 111 | _TeamCity* 112 | 113 | # DotCover is a Code Coverage Tool 114 | *.dotCover 115 | 116 | # Visual Studio code coverage results 117 | *.coverage 118 | *.coveragexml 119 | 120 | # NCrunch 121 | _NCrunch_* 122 | .*crunch*.local.xml 123 | nCrunchTemp_* 124 | 125 | # MightyMoose 126 | *.mm.* 127 | AutoTest.Net/ 128 | 129 | # Web workbench (sass) 130 | .sass-cache/ 131 | 132 | # Installshield output folder 133 | [Ee]xpress/ 134 | 135 | # DocProject is a documentation generator add-in 136 | DocProject/buildhelp/ 137 | DocProject/Help/*.HxT 138 | DocProject/Help/*.HxC 139 | DocProject/Help/*.hhc 140 | DocProject/Help/*.hhk 141 | DocProject/Help/*.hhp 142 | DocProject/Help/Html2 143 | DocProject/Help/html 144 | 145 | # Click-Once directory 146 | publish/ 147 | 148 | # Publish Web Output 149 | *.[Pp]ublish.xml 150 | *.azurePubxml 151 | # TODO: Comment the next line if you want to checkin your web deploy settings 152 | # but database connection strings (with potential passwords) will be unencrypted 153 | *.pubxml 154 | *.publishproj 155 | 156 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 157 | # checkin your Azure Web App publish settings, but sensitive information contained 158 | # in these scripts will be unencrypted 159 | PublishScripts/ 160 | 161 | # NuGet Packages 162 | *.nupkg 163 | # The packages folder can be ignored because of Package Restore 164 | **/packages/* 165 | # except build/, which is used as an MSBuild target. 
166 | !**/packages/build/ 167 | # Uncomment if necessary however generally it will be regenerated when needed 168 | #!**/packages/repositories.config 169 | # NuGet v3's project.json files produces more ignorable files 170 | *.nuget.props 171 | *.nuget.targets 172 | 173 | # Microsoft Azure Build Output 174 | csx/ 175 | *.build.csdef 176 | 177 | # Microsoft Azure Emulator 178 | ecf/ 179 | rcf/ 180 | 181 | # Windows Store app package directories and files 182 | AppPackages/ 183 | BundleArtifacts/ 184 | Package.StoreAssociation.xml 185 | _pkginfo.txt 186 | 187 | # Visual Studio cache files 188 | # files ending in .cache can be ignored 189 | *.[Cc]ache 190 | # but keep track of directories ending in .cache 191 | !*.[Cc]ache/ 192 | 193 | # Others 194 | ClientBin/ 195 | ~$* 196 | *~ 197 | *.dbmdl 198 | *.dbproj.schemaview 199 | *.jfm 200 | *.pfx 201 | *.publishsettings 202 | orleans.codegen.cs 203 | 204 | # Since there are multiple workflows, uncomment next line to ignore bower_components 205 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 206 | #bower_components/ 207 | 208 | # RIA/Silverlight projects 209 | Generated_Code/ 210 | 211 | # Backup & report files from converting an old project file 212 | # to a newer Visual Studio version. Backup files are not needed, 213 | # because we have git ;-) 214 | _UpgradeReport_Files/ 215 | Backup*/ 216 | UpgradeLog*.XML 217 | UpgradeLog*.htm 218 | 219 | # SQL Server files 220 | *.mdf 221 | *.ldf 222 | *.ndf 223 | 224 | # Business Intelligence projects 225 | *.rdl.data 226 | *.bim.layout 227 | *.bim_*.settings 228 | 229 | # Microsoft Fakes 230 | FakesAssemblies/ 231 | 232 | # GhostDoc plugin setting file 233 | *.GhostDoc.xml 234 | 235 | # Node.js Tools for Visual Studio 236 | .ntvs_analysis.dat 237 | node_modules/ 238 | 239 | # Typescript v1 declaration files 240 | typings/ 241 | 242 | # Visual Studio 6 build log 243 | *.plg 244 | 245 | # Visual Studio 6 workspace options file 246 | *.opt 247 | 248 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 249 | *.vbw 250 | 251 | # Visual Studio LightSwitch build output 252 | **/*.HTMLClient/GeneratedArtifacts 253 | **/*.DesktopClient/GeneratedArtifacts 254 | **/*.DesktopClient/ModelManifest.xml 255 | **/*.Server/GeneratedArtifacts 256 | **/*.Server/ModelManifest.xml 257 | _Pvt_Extensions 258 | 259 | # Paket dependency manager 260 | .paket/paket.exe 261 | paket-files/ 262 | 263 | # FAKE - F# Make 264 | .fake/ 265 | 266 | # JetBrains Rider 267 | .idea/ 268 | *.sln.iml 269 | 270 | # CodeRush 271 | .cr/ 272 | 273 | # Python Tools for Visual Studio (PTVS) 274 | __pycache__/ 275 | *.pyc 276 | 277 | # Cake - Uncomment if you are using it 278 | # tools/** 279 | # !tools/packages.config 280 | 281 | # Telerik's JustMock configuration file 282 | *.jmconfig 283 | 284 | # BizTalk build output 285 | *.btp.cs 286 | *.btm.cs 287 | *.odx.cs 288 | *.xsd.cs 289 | -------------------------------------------------------------------------------- /azure-functions-language-worker-protobuf/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/about-codeowners/ 2 | # for more info about CODEOWNERS file 3 | # 4 | # It uses the same pattern rule for gitignore file 5 | # https://git-scm.com/docs/gitignore#_pattern_format 6 | 7 | 8 | 9 | # AZURE FUNCTIONS TEAM 10 | # For all file changes, github would automatically 11 | # include the following people in the PRs. 
12 | # Language owners should get notified of any new changes to the proto file. 13 | 14 | src/proto/FunctionRpc.proto @vrdmr @gavin-aguiar @YunchuWang @surgupta-msft @satvu @ejizba @alrod @anatolib @kaibocai @shreyas-gopalakrishna @amamounelsayed @Francisco-Gamino 15 | -------------------------------------------------------------------------------- /azure-functions-language-worker-protobuf/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. All rights reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /azure-functions-language-worker-protobuf/README.md: -------------------------------------------------------------------------------- 1 | # Azure Functions Language Worker Protobuf 2 | 3 | This repository contains the protobuf definition file that defines the gRPC service used between the [Azure Functions Host](https://github.com/Azure/azure-functions-host) and the Azure Functions language workers. This repo is shared across many repos in many languages (one per worker) by using git commands. 4 | 5 | To use this repo in Azure Functions language workers, follow the steps below to add this repo as a subtree (*Adding This Repo*). If this repo is already embedded in a language worker repo, follow the steps to update the consumed file (*Pulling Updates*). 6 | 7 | Learn more about Azure Functions projects on the [meta](https://github.com/azure/azure-functions) repo. 8 | 9 | ## Adding This Repo 10 | 11 | From within the Azure Functions language worker repo: 12 | 1. Define a remote branch for cleaner git commands 13 | - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` 14 | - `git fetch proto-file` 15 | 2. Index the contents of azure-functions-language-worker-protobuf into the language worker repo 16 | - `git read-tree --prefix=<path in language worker repo> -u proto-file/<branch name>` 17 | 3. Add the new path in the language worker repo to the .gitignore file 18 | - In .gitignore, add the path used in the language worker repo 19 | 4. Finalize with a commit 20 | - `git commit -m "Added subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Branch: <branch name>. Commit: <latest commit hash>"` 21 | - `git push` 22 | 23 | ## Pulling Updates 24 | 25 | From within the Azure Functions language worker repo: 26 | 1.
Define a remote branch for cleaner git commands 27 | - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` 28 | - `git fetch proto-file` 29 | 2. Pull a specific release tag 30 | - `git fetch proto-file refs/tags/<tag-name>` 31 | - Example: `git fetch proto-file refs/tags/v1.1.0-protofile` 32 | 3. Merge updates 33 | - Merge with an explicit path to the subtree: `git merge -X subtree=<path to subtree> --squash <tag-name> --allow-unrelated-histories --strategy-option theirs` 34 | - Example: `git merge -X subtree=src/WebJobs.Script.Grpc/azure-functions-language-worker-protobuf --squash v1.1.0-protofile --allow-unrelated-histories --strategy-option theirs` 35 | 4. Finalize with a commit 36 | - `git commit -m "Updated subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Tag: <tag-name>. Commit: <latest commit hash>"` 37 | - `git push` 38 | 39 | ## Consuming FunctionRpc.proto 40 | *Note: Update `<versionNumber>` before running the following commands* 41 | 42 | ## CSharp 43 | ``` 44 | set NUGET_PATH="%UserProfile%\.nuget\packages" 45 | set GRPC_TOOLS_PATH=%NUGET_PATH%\grpc.tools\<versionNumber>\tools\windows_x86 46 | set PROTO_PATH=.\azure-functions-language-worker-protobuf\src\proto 47 | set PROTO=.\azure-functions-language-worker-protobuf\src\proto\FunctionRpc.proto 48 | set PROTOBUF_TOOLS=%NUGET_PATH%\google.protobuf.tools\<versionNumber>\tools 49 | set MSGDIR=.\Messages 50 | 51 | if exist %MSGDIR% rmdir /s /q %MSGDIR% 52 | mkdir %MSGDIR% 53 | 54 | set OUTDIR=%MSGDIR%\DotNet 55 | mkdir %OUTDIR% 56 | %GRPC_TOOLS_PATH%\protoc.exe %PROTO% --csharp_out %OUTDIR% --grpc_out=%OUTDIR% --plugin=protoc-gen-grpc=%GRPC_TOOLS_PATH%\grpc_csharp_plugin.exe --proto_path=%PROTO_PATH% --proto_path=%PROTOBUF_TOOLS% 57 | ``` 58 | ## JavaScript 59 | In package.json, add the following commands to the build script to generate the .js and .ts files. Install and use the npm package `protobufjs`. 60 | 61 | Generate JavaScript files: 62 | ``` 63 | pbjs -t json-module -w commonjs -o azure-functions-language-worker-protobuf/src/rpc.js azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto 64 | ``` 65 | Generate TypeScript files: 66 | ``` 67 | pbjs -t static-module azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto -o azure-functions-language-worker-protobuf/src/rpc_static.js && pbts -o azure-functions-language-worker-protobuf/src/rpc.d.ts azure-functions-language-worker-protobuf/src/rpc_static.js 68 | ``` 69 | 70 | ## Java 71 | Maven plugin: [protobuf-maven-plugin](https://www.xolstice.org/protobuf-maven-plugin/) 72 | In pom.xml, add the following under the configuration for this plugin 73 | `<protoSourceRoot>${basedir}/<path to worker repo>/azure-functions-language-worker-protobuf/src/proto</protoSourceRoot>` 74 | 75 | ## Python 76 | ``` 77 | python -m pip install -e .[dev] -U 78 | python setup.py build 79 | ``` 80 | 81 | ## Contributing 82 | 83 | This project welcomes contributions and suggestions. Most contributions require you to agree to a 84 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us 85 | the rights to use your contribution. For details, visit https://cla.microsoft.com. 86 | 87 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide 88 | a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions 89 | provided by the bot. You will only need to do this once across all repos using our CLA. 90 | 91 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
92 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or 93 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 94 | -------------------------------------------------------------------------------- /azure-functions-language-worker-protobuf/SECURITY.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Security 4 | 5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 6 | 7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. 8 | 9 | ## Reporting Security Issues 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues.** 12 | 13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). 14 | 15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). 16 | 17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). 18 | 19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 20 | 21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 22 | * Full paths of source file(s) related to the manifestation of the issue 23 | * The location of the affected source code (tag/branch/commit or direct URL) 24 | * Any special configuration required to reproduce the issue 25 | * Step-by-step instructions to reproduce the issue 26 | * Proof-of-concept or exploit code (if possible) 27 | * Impact of the issue, including how an attacker might exploit the issue 28 | 29 | This information will help us triage your report more quickly. 30 | 31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. 32 | 33 | ## Preferred Languages 34 | 35 | We prefer all communications to be in English. 36 | 37 | ## Policy 38 | 39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). 
40 | 41 | 42 | -------------------------------------------------------------------------------- /azure-functions-language-worker-protobuf/src/proto/identity/ClaimsIdentityRpc.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | // protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 3 | 4 | option java_package = "com.microsoft.azure.functions.rpc.messages"; 5 | 6 | import "shared/NullableTypes.proto"; 7 | 8 | // Light-weight representation of a .NET System.Security.Claims.ClaimsIdentity object. 9 | // This is the same serialization as found in EasyAuth, and needs to be kept in sync with 10 | // its ClaimsIdentitySlim definition, as seen in the WebJobs extension: 11 | // https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimsIdentitySlim.cs 12 | message RpcClaimsIdentity { 13 | NullableString authentication_type = 1; 14 | NullableString name_claim_type = 2; 15 | NullableString role_claim_type = 3; 16 | repeated RpcClaim claims = 4; 17 | } 18 | 19 | // Light-weight representation of a .NET System.Security.Claims.Claim object. 20 | // This is the same serialization as found in EasyAuth, and needs to be kept in sync with 21 | // its ClaimSlim definition, as seen in the WebJobs extension: 22 | // https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimSlim.cs 23 | message RpcClaim { 24 | string value = 1; 25 | string type = 2; 26 | } 27 | -------------------------------------------------------------------------------- /azure-functions-language-worker-protobuf/src/proto/shared/NullableTypes.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | // protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 3 | 4 | option java_package = "com.microsoft.azure.functions.rpc.messages"; 5 | 6 | import "google/protobuf/timestamp.proto"; 7 | 8 | message NullableString { 9 | oneof string { 10 | string value = 1; 11 | } 12 | } 13 | 14 | message NullableDouble { 15 | oneof double { 16 | double value = 1; 17 | } 18 | } 19 | 20 | message NullableBool { 21 | oneof bool { 22 | bool value = 1; 23 | } 24 | } 25 | 26 | message NullableTimestamp { 27 | oneof timestamp { 28 | google.protobuf.Timestamp value = 1; 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /azure-pipelines/code-mirror.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | branches: 3 | include: 4 | - v*.x 5 | 6 | resources: 7 | repositories: 8 | - repository: eng 9 | type: git 10 | name: engineering 11 | ref: refs/tags/release 12 | 13 | variables: 14 | - template: ci/variables/cfs.yml@eng 15 | 16 | extends: 17 | template: ci/code-mirror.yml@eng 18 | -------------------------------------------------------------------------------- /azure-pipelines/official-build.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | - name: IsPrerelease 3 | type: boolean 4 | default: true 5 | 6 | trigger: 7 | batch: true 8 | branches: 9 | include: 10 | - v3.x 11 | 12 | # CI only, does not trigger on PRs. 
13 | pr: none 14 | 15 | schedules: 16 | - cron: '30 10 * * *' 17 | displayName: Nightly build 18 | always: true 19 | branches: 20 | include: 21 | - v3.x 22 | 23 | resources: 24 | repositories: 25 | - repository: 1es 26 | type: git 27 | name: 1ESPipelineTemplates/1ESPipelineTemplates 28 | ref: refs/tags/release 29 | 30 | extends: 31 | template: v1/1ES.Official.PipelineTemplate.yml@1es 32 | parameters: 33 | pool: 34 | name: 1es-pool-azfunc 35 | image: 1es-windows-2022 36 | os: windows 37 | 38 | sdl: 39 | codeql: 40 | runSourceLanguagesInSourceAnalysis: true 41 | 42 | stages: 43 | - stage: WindowsUnitTests 44 | dependsOn: [] 45 | jobs: 46 | - template: /azure-pipelines/templates/test.yml@self 47 | 48 | - stage: LinuxUnitTests 49 | dependsOn: [] 50 | jobs: 51 | - template: /azure-pipelines/templates/test.yml@self 52 | pool: 53 | name: 1es-pool-azfunc 54 | image: 1es-ubuntu-22.04 55 | os: linux 56 | 57 | - stage: Build 58 | dependsOn: [] 59 | jobs: 60 | - job: 61 | templateContext: 62 | outputs: 63 | - output: pipelineArtifact 64 | path: $(Build.ArtifactStagingDirectory)/dropOutput 65 | artifact: drop 66 | sbomBuildDropPath: '$(Build.ArtifactStagingDirectory)/dropInput' 67 | sbomPackageName: 'Azure Functions Node.js Worker' 68 | # The list of components can't be determined from the webpacked file in the staging dir, so reference the original node_modules folder 69 | sbomBuildComponentPath: '$(Build.SourcesDirectory)/node_modules' 70 | - output: nuget 71 | condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/v3.x'), eq(variables['UPLOADPACKAGETOPRERELEASEFEED'], true)) 72 | packagesToPush: '$(Build.ArtifactStagingDirectory)/dropOutput/*.nupkg' 73 | packageParentPath: '$(Build.ArtifactStagingDirectory)/dropOutput' 74 | nuGetFeedType: 'internal' 75 | publishVstsFeed: 'e6a70c92-4128-439f-8012-382fe78d6396/f37f760c-aebd-443e-9714-ce725cd427df' # AzureFunctionsPreRelease feed 76 | allowPackageConflicts: true 77 | steps: 78 | - template: /azure-pipelines/templates/build.yml@self 79 | parameters: 80 | IsPrerelease: ${{ parameters.IsPrerelease }} 81 | -------------------------------------------------------------------------------- /azure-pipelines/public-build.yml: -------------------------------------------------------------------------------- 1 | # This build is used for public PR and CI builds. 2 | 3 | trigger: 4 | batch: true 5 | branches: 6 | include: 7 | - v3.x 8 | 9 | pr: 10 | branches: 11 | include: 12 | - v3.x 13 | 14 | schedules: 15 | - cron: '30 10 * * *' 16 | displayName: Nightly build 17 | always: true 18 | branches: 19 | include: 20 | - v3.x 21 | 22 | resources: 23 | repositories: 24 | - repository: 1es 25 | type: git 26 | name: 1ESPipelineTemplates/1ESPipelineTemplates 27 | ref: refs/tags/release 28 | 29 | extends: 30 | template: v1/1ES.Unofficial.PipelineTemplate.yml@1es 31 | parameters: 32 | pool: 33 | name: 1es-pool-azfunc-public 34 | image: 1es-windows-2022 35 | os: windows 36 | 37 | sdl: 38 | codeql: 39 | compiled: 40 | enabled: true 41 | runSourceLanguagesInSourceAnalysis: true 42 | 43 | settings: 44 | # PR's from forks do not have sufficient permissions to set tags. 
45 | skipBuildTagsForGitHubPullRequests: ${{ variables['System.PullRequest.IsFork'] }} 46 | 47 | stages: 48 | - stage: WindowsUnitTests 49 | dependsOn: [] 50 | jobs: 51 | - template: /azure-pipelines/templates/test.yml@self 52 | 53 | - stage: LinuxUnitTests 54 | dependsOn: [] 55 | jobs: 56 | - template: /azure-pipelines/templates/test.yml@self 57 | pool: 58 | name: 1es-pool-azfunc-public 59 | image: 1es-ubuntu-22.04 60 | os: linux 61 | 62 | - stage: Build 63 | dependsOn: [] 64 | jobs: 65 | - job: 66 | templateContext: 67 | outputs: 68 | - output: pipelineArtifact 69 | path: $(Build.ArtifactStagingDirectory)/dropOutput 70 | artifact: drop 71 | steps: 72 | - template: /azure-pipelines/templates/build.yml@self 73 | parameters: 74 | IsPrerelease: true 75 | -------------------------------------------------------------------------------- /azure-pipelines/templates/build.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - task: NodeTool@0 3 | inputs: 4 | versionSpec: 14.x 5 | displayName: 'Install Node.js' 6 | - script: npm ci 7 | displayName: 'npm ci' 8 | - script: npm audit --production 9 | displayName: 'Run vulnerability scan' 10 | - script: npm run updateVersion -- --buildNumber $(Build.BuildNumber) 11 | displayName: 'npm run updateVersion' 12 | condition: and(succeeded(), eq(${{ parameters.IsPrerelease }}, true)) 13 | - script: npm run build 14 | displayName: 'npm run build' 15 | - script: npm run webpack 16 | displayName: 'npm run webpack' 17 | - task: CopyFiles@2 18 | displayName: 'Copy worker files to staging' 19 | inputs: 20 | sourceFolder: '$(Build.SourcesDirectory)' 21 | contents: | 22 | dist/src/nodejsWorker.js 23 | dist/src/worker-bundle.js 24 | LICENSE 25 | NOTICE.html 26 | package.json 27 | worker.config.json 28 | targetFolder: '$(Build.ArtifactStagingDirectory)/dropInput' 29 | cleanTargetFolder: true 30 | - script: npm prune --production 31 | displayName: 'npm prune --production' # so that only production dependencies are included in SBOM 32 | - task: NuGetCommand@2 33 | displayName: 'NuGet pack worker' 34 | inputs: 35 | command: pack 36 | packagesToPack: '$(Build.SourcesDirectory)/Worker.nuspec' 37 | packDestination: '$(Build.ArtifactStagingDirectory)/dropInput' 38 | basePath: '$(Build.ArtifactStagingDirectory)/dropInput' 39 | - script: mkdir dropOutput && mv dropInput/*.nupkg dropOutput 40 | displayName: 'Move package to dropOutput' 41 | workingDirectory: '$(Build.ArtifactStagingDirectory)' 42 | -------------------------------------------------------------------------------- /azure-pipelines/templates/test.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: UnitTests 3 | 4 | strategy: 5 | matrix: 6 | Node14: 7 | NODE_VERSION: '14.x' 8 | Node16: 9 | NODE_VERSION: '16.x' 10 | Node18: 11 | NODE_VERSION: '18.x' 12 | Node20: 13 | NODE_VERSION: '20.x' 14 | Node22: 15 | NODE_VERSION: '22.x' 16 | 17 | steps: 18 | - task: NodeTool@0 19 | inputs: 20 | versionSpec: $(NODE_VERSION) 21 | displayName: 'Install Node.js' 22 | - script: npm ci 23 | displayName: 'npm ci' 24 | - script: npm run build 25 | displayName: 'npm run build' 26 | - script: npm run lint 27 | displayName: 'npm run lint' 28 | - script: npm run updateVersion -- --validate 29 | displayName: 'validate version' 30 | - script: npm test 31 | displayName: 'Run unit tests' 32 | - task: PublishTestResults@2 33 | displayName: 'Publish Unit Test Results' 34 | inputs: 35 | testResultsFiles: 'test/unit-test-results.xml' 36 | 
testRunTitle: '$(Agent.JobName)' 37 | condition: succeededOrFailed() 38 | -------------------------------------------------------------------------------- /azure-pipelines/templates/worker.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: CreateWorkerRelease 3 | displayName: 'Publish NodeJs Worker to Nuget' 4 | templateContext: 5 | outputParentDirectory: $(Build.ArtifactStagingDirectory) 6 | outputs: 7 | - output: nuget 8 | condition: and(succeeded(), ${{ eq(parameters.IsPrerelease, true) }}) 9 | packagesToPush: '$(Build.ArtifactStagingDirectory)/dropOutput/*.nupkg' 10 | publishVstsFeed: 'e6a70c92-4128-439f-8012-382fe78d6396/eb652719-f36a-4e78-8541-e13a3cd655f9' 11 | nuGetFeedType: internal 12 | allowPackageConflicts: true 13 | steps: 14 | - task: NuGetToolInstaller@1 15 | displayName: 'Use NuGet 5x' 16 | inputs: 17 | versionSpec: 5.x 18 | checkLatest: true 19 | -------------------------------------------------------------------------------- /azure-pipelines/worker-build.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | - name: IsPrerelease 3 | type: boolean 4 | default: true 5 | 6 | pr: none 7 | 8 | resources: 9 | repositories: 10 | - repository: 1es 11 | type: git 12 | name: 1ESPipelineTemplates/1ESPipelineTemplates 13 | ref: refs/tags/release 14 | 15 | extends: 16 | template: v1/1ES.Official.PipelineTemplate.yml@1es 17 | parameters: 18 | pool: 19 | name: 1es-pool-azfunc 20 | image: 1es-windows-2022 21 | os: windows 22 | sdl: 23 | codeql: 24 | runSourceLanguagesInSourceAnalysis: true 25 | 26 | stages: 27 | - stage: Build 28 | dependsOn: [] 29 | jobs: 30 | - job: 31 | templateContext: 32 | outputs: 33 | - output: pipelineArtifact 34 | path: $(Build.ArtifactStagingDirectory)/dropOutput 35 | artifact: drop 36 | sbomBuildDropPath: '$(Build.ArtifactStagingDirectory)/dropInput' 37 | sbomPackageName: 'Azure Functions Node.js Worker' 38 | # The list of components can't be determined from the webpacked file in the staging dir, so reference the original node_modules folder 39 | sbomBuildComponentPath: '$(Build.SourcesDirectory)/node_modules' 40 | steps: 41 | - template: /azure-pipelines/templates/build.yml@self 42 | parameters: 43 | IsPrerelease: ${{ parameters.IsPrerelease }} 44 | - stage: PublishNuget 45 | dependsOn: [Build] 46 | jobs: 47 | - template: /azure-pipelines/templates/worker.yml@self 48 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "azure-functions-nodejs-worker", 3 | "author": "Microsoft Corporation", 4 | "version": "3.10.1", 5 | "description": "Microsoft Azure Functions NodeJS Worker", 6 | "license": "(MIT OR Apache-2.0)", 7 | "dependencies": { 8 | "@azure/functions": "^3.5.0", 9 | "@grpc/grpc-js": "^1.8.22", 10 | "@grpc/proto-loader": "^0.7.8", 11 | "blocked-at": "^1.2.0", 12 | "fs-extra": "^10.0.1", 13 | "globby": "^11.0.0", 14 | "minimist": "^1.2.5", 15 | "p-retry": "^4.0.0" 16 | }, 17 | "devDependencies": { 18 | "@types/blocked-at": "^1.0.1", 19 | "@types/chai": "^4.2.22", 20 | "@types/chai-as-promised": "^7.1.5", 21 | "@types/fs-extra": "^9.0.13", 22 | "@types/minimist": "^1.2.2", 23 | "@types/mocha": "^2.2.48", 24 | "@types/mock-fs": "^4.13.1", 25 | "@types/node": "^16.9.6", 26 | "@types/semver": "^7.3.9", 27 | "@types/sinon": "^7.0.0", 28 | "@types/uuid": "^8.3.4", 29 | "@typescript-eslint/eslint-plugin": "^5.12.1", 
30 | "@typescript-eslint/parser": "^5.12.1", 31 | "chai": "^4.2.0", 32 | "chai-as-promised": "^7.1.1", 33 | "escape-string-regexp": "^4.0.0", 34 | "eslint": "^7.32.0", 35 | "eslint-config-prettier": "^8.3.0", 36 | "eslint-plugin-deprecation": "^1.3.2", 37 | "eslint-plugin-header": "^3.1.1", 38 | "eslint-plugin-import": "^2.29.0", 39 | "eslint-plugin-prettier": "^4.0.0", 40 | "eslint-plugin-simple-import-sort": "^10.0.0", 41 | "minimist": "^1.2.6", 42 | "mocha": "^11.1.0", 43 | "mocha-junit-reporter": "^2.0.2", 44 | "mocha-multi-reporters": "^1.5.1", 45 | "mock-fs": "^5.1.2", 46 | "prettier": "^2.4.1", 47 | "protobufjs": "^7.2.5", 48 | "protobufjs-cli": "^1.1.1", 49 | "rimraf": "^2.6.3", 50 | "semver": "^7.3.5", 51 | "shx": "^0.3.3", 52 | "sinon": "^7.0.0", 53 | "ts-node": "^3.3.0", 54 | "typescript": "^4.5.5", 55 | "webpack": "^5.94.0", 56 | "webpack-cli": "^4.8.0" 57 | }, 58 | "homepage": "https://github.com/Azure/azure-functions-nodejs-worker", 59 | "repository": { 60 | "type": "git", 61 | "url": "https://github.com/Azure/azure-functions-nodejs-worker.git" 62 | }, 63 | "bugs": { 64 | "url": "https://github.com/Azure/azure-functions-nodejs-worker/issues" 65 | }, 66 | "scripts": { 67 | "clean": "rimraf dist && rimraf azure-functions-language-worker-protobuf/src/rpc*", 68 | "build": "rimraf dist && npm run gen && shx mkdir -p dist/azure-functions-language-worker-protobuf/src && shx cp azure-functions-language-worker-protobuf/src/rpc.* dist/azure-functions-language-worker-protobuf/src/. && tsc", 69 | "gen": "node scripts/generateProtos.js", 70 | "test": "mocha -r ts-node/register \"./test/**/*.ts\" --reporter mocha-multi-reporters --reporter-options configFile=test/mochaReporterOptions.json", 71 | "lint": "eslint .", 72 | "lint-fix": "eslint . --fix", 73 | "format": "prettier . 
--write", 74 | "updateVersion": "ts-node ./scripts/updateVersion.ts", 75 | "watch": "tsc --watch", 76 | "webpack": "webpack --mode production" 77 | }, 78 | "files": [ 79 | "dist" 80 | ], 81 | "main": "dist/src/nodejsWorker.js" 82 | } 83 | -------------------------------------------------------------------------------- /scripts/generateProtos.js: -------------------------------------------------------------------------------- 1 | const util = require('util'); 2 | const exec = util.promisify(require('child_process').exec); 3 | const path = require('path'); 4 | 5 | async function generateProtos() { 6 | try { 7 | const protoSrc = path.join(__dirname, '..', 'azure-functions-language-worker-protobuf', 'src'); 8 | const protoRoot = path.join(protoSrc, 'proto'); 9 | 10 | const protoFiles = [ 11 | path.join(protoRoot, 'shared', 'NullableTypes.proto'), 12 | path.join(protoRoot, 'identity', 'ClaimsIdentityRpc.proto'), 13 | path.join(protoRoot, 'FunctionRpc.proto'), 14 | ].join(' '); 15 | 16 | console.log('Compiling protobuf definitions...'); 17 | 18 | console.log('Compiling to JavaScript...'); 19 | const jsOut = path.join(protoSrc, 'rpc.js'); 20 | await run(`pbjs -t json-module -w commonjs -o ${jsOut} ${protoFiles}`); 21 | console.log(`Compiled to JavaScript: "${jsOut}"`); 22 | 23 | console.log('Compiling to JavaScript static module...'); 24 | const jsStaticOut = path.join(protoSrc, 'rpc_static.js'); 25 | await run(`pbjs -t static-module -o ${jsStaticOut} ${protoFiles}`); 26 | console.log(`Compiled to JavaScript static module: "${jsStaticOut}"`); 27 | 28 | console.log('Compiling to TypeScript...'); 29 | const dTsOut = path.join(protoSrc, 'rpc.d.ts'); 30 | await run(`pbts -o ${dTsOut} ${jsStaticOut}`); 31 | console.log(`Compiled to TypeScript: "${dTsOut}"`); 32 | } catch (error) { 33 | console.error('Failed to compile protobuf definitions:'); 34 | console.error(error.message); 35 | process.exit(-1); 36 | } 37 | } 38 | 39 | async function run(command) { 40 | const { stdout, stderr } = await exec(command); 41 | console.log(stdout); 42 | console.error(stderr); 43 | } 44 | 45 | generateProtos(); 46 | -------------------------------------------------------------------------------- /scripts/updateVersion.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { execSync } from 'child_process'; 5 | import { readFileSync, readJSONSync, writeFileSync } from 'fs-extra'; 6 | import * as parseArgs from 'minimist'; 7 | import * as path from 'path'; 8 | import * as semver from 'semver'; 9 | 10 | const repoRoot = path.join(__dirname, '..'); 11 | const packageJsonPath = path.join(repoRoot, 'package.json'); 12 | const nuspecPath = path.join(repoRoot, 'Worker.nuspec'); 13 | const nuspecVersionRegex = /(.*)<\/version>/i; 14 | const constantsPath = path.join(repoRoot, 'src', 'constants.ts'); 15 | const constantsVersionRegex = /version = '(.*)'/i; 16 | 17 | const args = parseArgs(process.argv.slice(2)); 18 | if (args.validate) { 19 | validateVersion(); 20 | } else if (args.version) { 21 | updateVersion(args.version); 22 | } else if (args.buildNumber) { 23 | const currentVersion = validateVersion(); 24 | updateVersion(`${currentVersion}-alpha.${args.buildNumber}`); 25 | } else { 26 | console.log(`This script can be used to either update the version of the worker or validate that the repo is in a valid state with regards to versioning. 
27 | 28 | Example usage: 29 | 30 | npm run updateVersion -- --version 3.3.0 31 | npm run updateVersion -- --buildNumber 20230517.1 32 | npm run updateVersion -- --validate`); 33 | throw new Error('Invalid arguments'); 34 | } 35 | 36 | function validateVersion(): string { 37 | const packageJson = readJSONSync(packageJsonPath); 38 | const packageJsonVersion = packageJson.version; 39 | 40 | const nuspecVersion = getVersion(nuspecPath, nuspecVersionRegex); 41 | 42 | const constantsVersion = getVersion(constantsPath, constantsVersionRegex); 43 | 44 | console.log('Found the following versions:'); 45 | console.log(`- package.json: ${packageJsonVersion}`); 46 | console.log(`- Worker.nuspec: ${nuspecVersion}`); 47 | console.log(`- src/constants.ts: ${constantsVersion}`); 48 | 49 | const parsedVersion = semver.parse(packageJsonVersion); 50 | 51 | if (!packageJsonVersion || !nuspecVersion || !constantsVersion || !parsedVersion) { 52 | throw new Error('Failed to detect valid versions in all expected files'); 53 | } else if (nuspecVersion !== packageJsonVersion || constantsVersion !== packageJsonVersion) { 54 | throw new Error(`Worker versions do not match.`); 55 | } else { 56 | console.log('Versions match! 🎉'); 57 | return packageJsonVersion; 58 | } 59 | } 60 | 61 | function getVersion(filePath: string, regex: RegExp): string { 62 | const fileContents = readFileSync(filePath).toString(); 63 | const match = fileContents.match(regex); 64 | if (!match) { 65 | throw new Error(`Failed to find match for "${regex.source}".`); 66 | } 67 | return match[1]; 68 | } 69 | 70 | function updateVersion(newVersion: string) { 71 | updatePackageJsonVersion(repoRoot, newVersion); 72 | updateVersionByRegex(nuspecPath, nuspecVersionRegex, newVersion); 73 | updateVersionByRegex(constantsPath, constantsVersionRegex, newVersion); 74 | } 75 | 76 | function updatePackageJsonVersion(cwd: string, newVersion: string) { 77 | execSync(`npm version ${newVersion} --no-git-tag-version --allow-same-version`, { cwd }); 78 | console.log(`Updated ${cwd}/package.json to version ${newVersion}`); 79 | } 80 | 81 | function updateVersionByRegex(filePath: string, regex: RegExp, newVersion: string) { 82 | const oldFileContents = readFileSync(filePath).toString(); 83 | const match = oldFileContents.match(regex); 84 | if (!match) { 85 | throw new Error(`Failed to find match for "${regex.source}".`); 86 | } 87 | const oldLine = match[0]; 88 | const oldVersion = match[1]; 89 | const newLine = oldLine.replace(oldVersion, newVersion); 90 | const newFileContents = oldFileContents.replace(oldLine, newLine); 91 | writeFileSync(filePath, newFileContents); 92 | console.log(`Updated ${filePath} from ${oldVersion} to version ${newVersion}`); 93 | } 94 | -------------------------------------------------------------------------------- /src/AppContext.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
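// Lifecycle sketch (illustrative): a single AppContext instance holds all per-app state
// (registered functions, hooks, hook data, packageJson). WorkerContext.resetApp() swaps in
// a fresh instance on each environment reload, restoring the default programming model:
//
//   worker.resetApp('/home/site/wwwroot');                              // directory value is a placeholder
//   worker.app.programmingModel === worker.defaultProgrammingModel;     // true again after reset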
3 | 4 | import { FunctionCallback, HookCallback, HookData, ProgrammingModel } from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { PackageJson } from './parsers/parsePackageJson'; 7 | 8 | export interface RegisteredFunction { 9 | metadata: rpc.IRpcFunctionMetadata; 10 | callback: FunctionCallback; 11 | } 12 | 13 | export interface LegacyRegisteredFunction extends RegisteredFunction { 14 | thisArg: unknown; 15 | } 16 | 17 | export class AppContext { 18 | functionAppDirectory: string | null | undefined; 19 | constructor(functionAppDirectory: string | null | undefined) { 20 | this.functionAppDirectory = functionAppDirectory; 21 | } 22 | packageJson: PackageJson = {}; 23 | /** 24 | * this hook data will be passed to (and set by) all hooks in all scopes 25 | */ 26 | appHookData: HookData = {}; 27 | /** 28 | * this hook data is limited to the app-level scope and persisted only for app-level hooks 29 | */ 30 | appLevelOnlyHookData: HookData = {}; 31 | programmingModel?: ProgrammingModel; 32 | preInvocationHooks: HookCallback[] = []; 33 | postInvocationHooks: HookCallback[] = []; 34 | appStartHooks: HookCallback[] = []; 35 | appTerminateHooks: HookCallback[] = []; 36 | logHooks: HookCallback[] = []; 37 | functions: { [id: string]: RegisteredFunction } = {}; 38 | legacyFunctions: { [id: string]: LegacyRegisteredFunction } = {}; 39 | workerIndexingLocked = false; 40 | isUsingWorkerIndexing = false; 41 | currentEntryPoint?: string; 42 | blockingAppStartError?: Error; 43 | } 44 | -------------------------------------------------------------------------------- /src/Disposable.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | /** 5 | * Based off of VS Code 6 | * https://github.com/microsoft/vscode/blob/7bed4ce3e9f5059b5fc638c348f064edabcce5d2/src/vs/workbench/api/common/extHostTypes.ts#L65 7 | */ 8 | export class Disposable { 9 | static from(...inDisposables: { dispose(): any }[]): Disposable { 10 | let disposables: ReadonlyArray<{ dispose(): any }> | undefined = inDisposables; 11 | return new Disposable(function () { 12 | if (disposables) { 13 | for (const disposable of disposables) { 14 | if (disposable && typeof disposable.dispose === 'function') { 15 | disposable.dispose(); 16 | } 17 | } 18 | disposables = undefined; 19 | } 20 | }); 21 | } 22 | 23 | #callOnDispose?: () => any; 24 | 25 | constructor(callOnDispose: () => any) { 26 | this.#callOnDispose = callOnDispose; 27 | } 28 | 29 | dispose(): any { 30 | if (typeof this.#callOnDispose === 'function') { 31 | this.#callOnDispose(); 32 | this.#callOnDispose = undefined; 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/GrpcClient.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
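// Usage sketch, based on how Worker.ts consumes this module (the address, message size,
// and ids below are placeholders, not real defaults):
//
//   const stream = CreateGrpcEventStream('127.0.0.1:50051', 128 * 1024 * 1024);
//   stream.on('data', (msg) => {
//       // dispatch msg.workerInitRequest, msg.invocationRequest, etc. (see setupEventStream)
//   });
//   stream.write({ requestId: '<requestId>', startStream: { workerId: '<workerId>' } });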
3 | 4 | import * as grpc from '@grpc/grpc-js'; 5 | import { ServiceClientConstructor } from '@grpc/grpc-js/build/src/make-client'; 6 | import * as grpcloader from '@grpc/proto-loader'; 7 | // import protobufjs json descriptor 8 | import * as jsonModule from '../azure-functions-language-worker-protobuf/src/rpc'; 9 | 10 | import rpc = jsonModule.AzureFunctionsRpcMessages; 11 | 12 | function GetGrpcClientConstructor(): ServiceClientConstructor { 13 | const packageDef = grpcloader.fromJSON(jsonModule as protobuf.INamespace, { 14 | objects: true, 15 | defaults: true, 16 | oneofs: true, 17 | }); 18 | const serviceDef = packageDef['AzureFunctionsRpcMessages.FunctionRpc'] as grpcloader.ServiceDefinition; 19 | const clientConstructor: ServiceClientConstructor = grpc.makeClientConstructor(serviceDef, 'FunctionRpc'); 20 | return clientConstructor; 21 | } 22 | 23 | export interface IEventStream { 24 | write(message: rpc.IStreamingMessage); 25 | on(event: 'data', listener: (message: rpc.StreamingMessage) => void); 26 | on(event: string, listener: Function); 27 | end(): void; 28 | } 29 | 30 | export function CreateGrpcEventStream(connection: string, grpcMaxMessageLength: number): IEventStream { 31 | const constructor: ServiceClientConstructor = GetGrpcClientConstructor(); 32 | const clientOptions = { 33 | 'grpc.max_send_message_length': grpcMaxMessageLength, 34 | 'grpc.max_receive_message_length': grpcMaxMessageLength, 35 | }; 36 | const client = new constructor(connection, grpc.credentials.createInsecure(), clientOptions); 37 | process.on('exit', () => { 38 | grpc.closeClient(client); 39 | }); 40 | 41 | const eventStream = client.eventStream(); 42 | 43 | eventStream.on('end', function () { 44 | eventStream.end(); 45 | process.exit(); 46 | }); 47 | return eventStream; 48 | } 49 | -------------------------------------------------------------------------------- /src/LegacyFunctionLoader.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
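// Entry point resolution sketch (module shapes are illustrative): getEntryPoint() prefers an
// explicit 'entryPoint' metadata property, then a lone export, then an export named 'run' or
// 'index' by convention:
//
//   module.exports = { myFunc: async (context) => { /* ... */ } };            // lone export is used
//   module.exports = { run: async (context) => { /* ... */ }, helper: {} };   // 'run' wins over 'helper'
//   // anything else requires naming the export via the 'entryPoint' metadata property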
3 | 4 | import { FunctionCallback } from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { RegisteredFunction } from './AppContext'; 7 | import { AzFuncSystemError } from './errors'; 8 | import { loadScriptFile } from './loadScriptFile'; 9 | import { PackageJson } from './parsers/parsePackageJson'; 10 | import { nonNullProp } from './utils/nonNull'; 11 | import { worker } from './WorkerContext'; 12 | 13 | export async function loadLegacyFunction( 14 | functionId: string, 15 | metadata: rpc.IRpcFunctionMetadata, 16 | packageJson: PackageJson 17 | ): Promise { 18 | if (metadata.isProxy === true) { 19 | return; 20 | } 21 | const script: any = await loadScriptFile(nonNullProp(metadata, 'scriptFile'), packageJson); 22 | const entryPoint = (metadata && metadata.entryPoint); 23 | const [callback, thisArg] = getEntryPoint(script, entryPoint); 24 | worker.app.legacyFunctions[functionId] = { metadata, callback, thisArg }; 25 | } 26 | 27 | export function getLegacyFunction(functionId: string): RegisteredFunction | undefined { 28 | const loadedFunction = worker.app.legacyFunctions[functionId]; 29 | if (loadedFunction) { 30 | return { 31 | metadata: loadedFunction.metadata, 32 | // `bind` is necessary to set the `this` arg, but it's also nice because it makes a clone of the function, preventing this invocation from affecting future invocations 33 | callback: loadedFunction.callback.bind(loadedFunction.thisArg), 34 | }; 35 | } else { 36 | return undefined; 37 | } 38 | } 39 | 40 | function getEntryPoint(f: any, entryPoint?: string): [FunctionCallback, unknown] { 41 | let thisArg: unknown; 42 | if (f !== null && typeof f === 'object') { 43 | thisArg = f; 44 | if (entryPoint) { 45 | // the module exports multiple functions 46 | // and an explicit entry point was named 47 | f = f[entryPoint]; 48 | } else if (Object.keys(f).length === 1) { 49 | // a single named function was exported 50 | const name = Object.keys(f)[0]; 51 | f = f[name]; 52 | } else { 53 | // finally, see if there is an exported function named 54 | // 'run' or 'index' by convention 55 | f = f.run || f.index; 56 | } 57 | } 58 | 59 | if (!f) { 60 | const msg = 61 | (entryPoint 62 | ? `Unable to determine function entry point: ${entryPoint}. ` 63 | : 'Unable to determine function entry point. ') + 64 | 'If multiple functions are exported, ' + 65 | "you must indicate the entry point, either by naming it 'run' or 'index', or by naming it " + 66 | "explicitly via the 'entryPoint' metadata property."; 67 | throw new AzFuncSystemError(msg); 68 | } else if (typeof f !== 'function') { 69 | throw new AzFuncSystemError( 70 | 'The resolved entry point is not a function and cannot be invoked by the functions runtime. Make sure the function has been correctly exported.' 71 | ); 72 | } 73 | 74 | return [f, thisArg]; 75 | } 76 | -------------------------------------------------------------------------------- /src/Worker.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
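// Launch sketch: the Functions host starts the compiled worker entry point (dist/src/nodejsWorker.js,
// per the package.json "main" field) and passes gRPC connection info as CLI flags. The argument
// values below are placeholders:
//
//   node dist/src/nodejsWorker.js \
//     --functions-uri http://127.0.0.1:50051 \
//     --functions-worker-id <workerId> \
//     --functions-request-id <requestId> \
//     --functions-grpc-max-message-length 134217728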
3 | 4 | import * as parseArgs from 'minimist'; 5 | import { AzFuncSystemError, ensureErrorType, trySetErrorMessage } from './errors'; 6 | import { CreateGrpcEventStream } from './GrpcClient'; 7 | import { setupCoreModule } from './setupCoreModule'; 8 | import { setupEventStream } from './setupEventStream'; 9 | import { startBlockedMonitor } from './utils/blockedMonitor'; 10 | import { systemError, systemLog } from './utils/Logger'; 11 | import { isEnvironmentVariableSet } from './utils/util'; 12 | import { worker } from './WorkerContext'; 13 | 14 | export function startNodeWorker(args) { 15 | const parsedArgs = parseArgs(args.slice(2)); 16 | const uri = parsedArgs['functions-uri']; 17 | const workerId = parsedArgs['functions-worker-id']; 18 | const requestId = parsedArgs['functions-request-id']; 19 | const grpcMaxMessageLength = parsedArgs['functions-grpc-max-message-length']; 20 | if (!uri || !workerId || !requestId || !grpcMaxMessageLength) { 21 | systemLog( 22 | 'usage --functions-uri uri --functions-worker-id workerId --functions-request-id requestId --functions-grpc-max-message-length grpcMaxMessageLength' 23 | ); 24 | // Find which arguments are in error 25 | const debugInfo: string[] = []; 26 | if (!uri) debugInfo.push(`'functions-uri' is ${uri}`); 27 | if (!workerId) debugInfo.push(`'functions-worker-id' is ${workerId}`); 28 | if (!requestId) debugInfo.push(`'functions-request-id' is ${requestId}`); 29 | if (!grpcMaxMessageLength) debugInfo.push(`'functions-grpc-max-message-length' is ${grpcMaxMessageLength}`); 30 | 31 | throw new AzFuncSystemError(`gRPC client connection info is missing or incorrect (${debugInfo.join(', ')}).`); 32 | } 33 | worker.id = workerId; 34 | 35 | const connection = new URL(uri).host; 36 | systemLog(`Worker ${workerId} connecting on ${connection}`); 37 | 38 | try { 39 | worker.eventStream = CreateGrpcEventStream(connection, parseInt(grpcMaxMessageLength)); 40 | } catch (err) { 41 | const error = ensureErrorType(err); 42 | error.isAzureFunctionsSystemError = true; 43 | const message = 'Error creating GRPC event stream: ' + error.message; 44 | trySetErrorMessage(error, message); 45 | throw error; 46 | } 47 | 48 | setupEventStream(); 49 | setupCoreModule(); 50 | 51 | worker.eventStream.write({ 52 | requestId: requestId, 53 | startStream: { 54 | workerId: workerId, 55 | }, 56 | }); 57 | 58 | process.on('uncaughtException', (err: unknown) => { 59 | const error = ensureErrorType(err); 60 | let errorMessage: string; 61 | if (error.isAzureFunctionsSystemError) { 62 | errorMessage = `Worker uncaught exception: ${error.stack || err}`; 63 | } else { 64 | errorMessage = `Worker uncaught exception (learn more: https://go.microsoft.com/fwlink/?linkid=2097909 ): ${ 65 | error.stack || err 66 | }`; 67 | } 68 | 69 | systemError(errorMessage); 70 | process.exit(1); 71 | }); 72 | process.on('exit', (code) => { 73 | systemLog(`Worker ${workerId} exited with code ${code}`); 74 | }); 75 | 76 | if (isEnvironmentVariableSet(process.env.AZURE_FUNCTIONS_NODE_BLOCK_LOG)) { 77 | startBlockedMonitor(worker); 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/WorkerContext.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
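// Logging sketch: worker.log() runs any registered log hooks (which may rewrite the message/level
// of user-category logs only) and then forwards the record to the host over the event stream.
// Values below are illustrative:
//
//   worker.log({
//       message: 'Worker initialized',
//       level: rpc.RpcLog.Level.Information,
//       logCategory: rpc.RpcLog.RpcLogCategory.System,
//   });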
3 | 4 | import { ProgrammingModel } from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { AppContext } from './AppContext'; 7 | import { fromCoreLogLevel } from './coreApi/converters/fromCoreStatusResult'; 8 | import { AzFuncSystemError } from './errors'; 9 | import { IEventStream } from './GrpcClient'; 10 | import { InvocationLogContext, LogHookContext } from './hooks/LogHookContext'; 11 | 12 | class WorkerContext { 13 | app = new AppContext(undefined); 14 | defaultProgrammingModel?: ProgrammingModel; 15 | 16 | /** 17 | * This will only be set after worker init request is received 18 | */ 19 | _hostVersion?: string; 20 | 21 | #id?: string; 22 | #eventStream?: IEventStream; 23 | #notInitializedMsg = 'WorkerContext has not been initialized yet.'; 24 | 25 | get id(): string { 26 | if (!this.#id) { 27 | throw new AzFuncSystemError(this.#notInitializedMsg); 28 | } else { 29 | return this.#id; 30 | } 31 | } 32 | 33 | set id(value: string) { 34 | this.#id = value; 35 | } 36 | 37 | get eventStream(): IEventStream { 38 | if (!this.#eventStream) { 39 | throw new AzFuncSystemError(this.#notInitializedMsg); 40 | } else { 41 | return this.#eventStream; 42 | } 43 | } 44 | 45 | set eventStream(value: IEventStream) { 46 | this.#eventStream = value; 47 | } 48 | 49 | get hostVersion(): string { 50 | if (!this._hostVersion) { 51 | throw new AzFuncSystemError('Cannot access hostVersion before worker init'); 52 | } else { 53 | return this._hostVersion; 54 | } 55 | } 56 | 57 | resetApp(functionAppDirectory: string | null | undefined): void { 58 | this.app = new AppContext(functionAppDirectory); 59 | this.app.programmingModel = this.defaultProgrammingModel; 60 | } 61 | 62 | /** 63 | * Captured logs or relevant details can use the logs property 64 | * @param requestId gRPC message request id 65 | * @param msg gRPC message content 66 | */ 67 | log(log: rpc.IRpcLog, invocationLogCtx?: InvocationLogContext): void { 68 | try { 69 | const logContext = new LogHookContext(log, invocationLogCtx); 70 | for (const callback of worker.app.logHooks) { 71 | callback(logContext); 72 | } 73 | 74 | if (log.logCategory === rpc.RpcLog.RpcLogCategory.User) { 75 | // let hooks change and filter these values, but only for user-generated logs 76 | // system logs should always be sent as-is 77 | log.message = logContext.message; 78 | log.level = fromCoreLogLevel(logContext.level); 79 | } 80 | } catch { 81 | // ignore so that user hooks can't prevent system logs 82 | } 83 | 84 | this.eventStream.write({ 85 | rpcLog: log, 86 | }); 87 | } 88 | } 89 | 90 | export const worker = new WorkerContext(); 91 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | export const version = '3.10.1'; 5 | -------------------------------------------------------------------------------- /src/coreApi/converters/ensureKeysMatch.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
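// Behavior sketch (values illustrative): the converters spread the source object and then overwrite
// a few keys, which can introduce keys that are explicitly undefined even though the source never
// had them. ensureKeysMatch() strips those extras so optional fields stay truly absent:
//
//   ensureKeysMatch({ a: 1 }, { a: 1, b: undefined });   // -> { a: 1 }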
3 | 4 | /** 5 | * Removes some unnecessary properties that may have been set to `undefined` during conversion 6 | */ 7 | export function ensureKeysMatch(data: TData, result: TResult): TResult { 8 | for (const key of Object.keys(result)) { 9 | if (!(key in data)) { 10 | delete result[key]; 11 | } 12 | } 13 | return result; 14 | } 15 | -------------------------------------------------------------------------------- /src/coreApi/converters/fromCoreFunctionMetadata.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { fromCoreStatusResult } from './fromCoreStatusResult'; 8 | import { handleDefaultEnumCase } from './handleDefaultEnumCase'; 9 | 10 | export function fromCoreFunctionMetadata(data: coreTypes.RpcFunctionMetadata): rpc.IRpcFunctionMetadata { 11 | const result = { 12 | ...data, 13 | bindings: fromCoreBindings(data.bindings), 14 | status: fromCoreStatusResult(data.status), 15 | retryOptions: fromCoreRetryOptions(data.retryOptions), 16 | }; 17 | return ensureKeysMatch(data, result); 18 | } 19 | 20 | function fromCoreBindings( 21 | data: { [key: string]: coreTypes.RpcBindingInfo } | null | undefined 22 | ): { [key: string]: rpc.IBindingInfo } | null | undefined { 23 | if (data) { 24 | const result = {}; 25 | for (const [key, value] of Object.entries(data)) { 26 | result[key] = fromCoreBinding(value); 27 | } 28 | return ensureKeysMatch(data, result); 29 | } else { 30 | return data; 31 | } 32 | } 33 | 34 | function fromCoreBinding(data: coreTypes.RpcBindingInfo | null | undefined): rpc.IBindingInfo | null | undefined { 35 | if (data) { 36 | const result = { 37 | ...data, 38 | dataType: fromCoreBindingDataType(data.dataType), 39 | direction: fromCoreBindingDirection(data.direction), 40 | }; 41 | return ensureKeysMatch(data, result); 42 | } else { 43 | return data; 44 | } 45 | } 46 | 47 | function fromCoreBindingDataType( 48 | data: coreTypes.RpcBindingDataType | null | undefined 49 | ): rpc.BindingInfo.DataType | null | undefined { 50 | switch (data) { 51 | case 'binary': 52 | return rpc.BindingInfo.DataType.binary; 53 | case 'stream': 54 | return rpc.BindingInfo.DataType.stream; 55 | case 'string': 56 | return rpc.BindingInfo.DataType.string; 57 | case 'undefined': 58 | return rpc.BindingInfo.DataType.undefined; 59 | default: 60 | return handleDefaultEnumCase(data, 'CoreRpcBindingDataType'); 61 | } 62 | } 63 | 64 | function fromCoreBindingDirection( 65 | data: coreTypes.RpcBindingDirection | null | undefined 66 | ): rpc.BindingInfo.Direction | null | undefined { 67 | switch (data) { 68 | case 'in': 69 | return rpc.BindingInfo.Direction.in; 70 | case 'inout': 71 | return rpc.BindingInfo.Direction.inout; 72 | case 'out': 73 | return rpc.BindingInfo.Direction.out; 74 | default: 75 | return handleDefaultEnumCase(data, 'CoreRpcBindingDirection'); 76 | } 77 | } 78 | 79 | function fromCoreRetryOptions( 80 | data: coreTypes.RpcRetryOptions | null | undefined 81 | ): rpc.IRpcRetryOptions | null | undefined { 82 | if (data) { 83 | const result = { 84 | ...data, 85 | retryStrategy: fromCoreRetryStrategy(data.retryStrategy), 86 | }; 87 | return ensureKeysMatch(data, result); 88 | } else { 89 | return data; 90 | } 91 | } 92 | 93 | 
function fromCoreRetryStrategy( 94 | data: coreTypes.RpcRetryStrategy | null | undefined 95 | ): rpc.RpcRetryOptions.RetryStrategy | null | undefined { 96 | switch (data) { 97 | case 'exponentialBackoff': 98 | return rpc.RpcRetryOptions.RetryStrategy.exponential_backoff; 99 | case 'fixedDelay': 100 | return rpc.RpcRetryOptions.RetryStrategy.fixed_delay; 101 | default: 102 | return handleDefaultEnumCase(data, 'CoreRpcRetryStrategy'); 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /src/coreApi/converters/fromCoreInvocationResponse.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { fromCoreStatusResult } from './fromCoreStatusResult'; 8 | import { fromCoreTypedData } from './fromCoreTypedData'; 9 | 10 | export function fromCoreInvocationResponse(data: coreTypes.RpcInvocationResponse): rpc.IInvocationResponse { 11 | const result = { 12 | ...data, 13 | outputData: fromCoreParameterBindings(data.outputData), 14 | result: fromCoreStatusResult(data.result), 15 | returnValue: fromCoreTypedData(data.returnValue), 16 | }; 17 | return ensureKeysMatch(data, result); 18 | } 19 | 20 | function fromCoreParameterBindings( 21 | data: coreTypes.RpcParameterBinding[] | null | undefined 22 | ): rpc.IParameterBinding[] | null | undefined { 23 | if (data) { 24 | return data.map(fromCoreParameterBinding); 25 | } else { 26 | return data; 27 | } 28 | } 29 | 30 | function fromCoreParameterBinding(data: coreTypes.RpcParameterBinding): rpc.IParameterBinding { 31 | const result = { 32 | ...data, 33 | data: fromCoreTypedData(data.data), 34 | }; 35 | return ensureKeysMatch(data, result); 36 | } 37 | -------------------------------------------------------------------------------- /src/coreApi/converters/fromCoreStatusResult.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
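// Mapping sketch (illustrative): string-literal values from @azure/functions-core are converted to
// the generated protobuf enums; null/undefined pass through, and unrecognized strings throw an
// AzFuncRangeError via handleDefaultEnumCase:
//
//   fromCoreLogLevel('information');   // -> rpc.RpcLog.Level.Information
//   fromCoreLogLevel(undefined);       // -> undefined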
3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { handleDefaultEnumCase } from './handleDefaultEnumCase'; 8 | 9 | export function fromCoreStatusResult( 10 | data: coreTypes.RpcStatusResult | null | undefined 11 | ): rpc.IStatusResult | null | undefined { 12 | if (data) { 13 | const result = { 14 | ...data, 15 | logs: fromCoreLogs(data.logs), 16 | status: fromCoreStatus(data.status), 17 | }; 18 | return ensureKeysMatch(data, result); 19 | } else { 20 | return data; 21 | } 22 | } 23 | 24 | function fromCoreLogs(data: coreTypes.RpcLog[] | null | undefined): rpc.IRpcLog[] | null | undefined { 25 | if (data) { 26 | return data.map(fromCoreLog); 27 | } else { 28 | return data; 29 | } 30 | } 31 | 32 | function fromCoreLog(data: coreTypes.RpcLog): rpc.IRpcLog { 33 | const result = { 34 | ...data, 35 | level: fromCoreLogLevel(data.level), 36 | logCategory: fromCoreLogCategory(data.logCategory), 37 | }; 38 | return ensureKeysMatch(data, result); 39 | } 40 | 41 | export function fromCoreLogLevel(data: coreTypes.RpcLogLevel | null | undefined): rpc.RpcLog.Level | null | undefined { 42 | switch (data) { 43 | case 'critical': 44 | return rpc.RpcLog.Level.Critical; 45 | case 'debug': 46 | return rpc.RpcLog.Level.Debug; 47 | case 'error': 48 | return rpc.RpcLog.Level.Error; 49 | case 'information': 50 | return rpc.RpcLog.Level.Information; 51 | case 'none': 52 | return rpc.RpcLog.Level.None; 53 | case 'trace': 54 | return rpc.RpcLog.Level.Trace; 55 | case 'warning': 56 | return rpc.RpcLog.Level.Warning; 57 | default: 58 | return handleDefaultEnumCase(data, 'CoreRpcLogLevel'); 59 | } 60 | } 61 | 62 | export function fromCoreLogCategory( 63 | data: coreTypes.RpcLogCategory | null | undefined 64 | ): rpc.RpcLog.RpcLogCategory | null | undefined { 65 | switch (data) { 66 | case 'customMetric': 67 | return rpc.RpcLog.RpcLogCategory.CustomMetric; 68 | case 'system': 69 | return rpc.RpcLog.RpcLogCategory.System; 70 | case 'user': 71 | return rpc.RpcLog.RpcLogCategory.User; 72 | default: 73 | return handleDefaultEnumCase(data, 'CoreRpcLogCategory'); 74 | } 75 | } 76 | 77 | function fromCoreStatus(data: coreTypes.RpcStatus | null | undefined): rpc.StatusResult.Status | null | undefined { 78 | switch (data) { 79 | case 'cancelled': 80 | return rpc.StatusResult.Status.Cancelled; 81 | case 'failure': 82 | return rpc.StatusResult.Status.Failure; 83 | case 'success': 84 | return rpc.StatusResult.Status.Success; 85 | default: 86 | return handleDefaultEnumCase(data, 'CoreRpcStatus'); 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /src/coreApi/converters/fromCoreTypedData.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { handleDefaultEnumCase } from './handleDefaultEnumCase'; 8 | 9 | export function fromCoreTypedData(data: coreTypes.RpcTypedData | null | undefined): rpc.ITypedData | null | undefined { 10 | if (data) { 11 | const result = { 12 | ...data, 13 | http: fromCoreHttpData(data.http), 14 | }; 15 | return ensureKeysMatch(data, result); 16 | } else { 17 | return data; 18 | } 19 | } 20 | 21 | function fromCoreHttpData(data: coreTypes.RpcHttpData | null | undefined): rpc.IRpcHttp | null | undefined { 22 | if (data) { 23 | const result = { 24 | ...data, 25 | body: fromCoreTypedData(data.body), 26 | cookies: fromCoreHttpCookies(data.cookies), 27 | rawBody: fromCoreTypedData(data.rawBody), 28 | }; 29 | return ensureKeysMatch(data, result); 30 | } else { 31 | return data; 32 | } 33 | } 34 | 35 | function fromCoreHttpCookies( 36 | data: coreTypes.RpcHttpCookie[] | null | undefined 37 | ): rpc.IRpcHttpCookie[] | null | undefined { 38 | if (data) { 39 | return data.map(fromCoreHttpCookie); 40 | } else { 41 | return data; 42 | } 43 | } 44 | 45 | function fromCoreHttpCookie(data: coreTypes.RpcHttpCookie): rpc.IRpcHttpCookie { 46 | const result = { 47 | ...data, 48 | sameSite: fromCoreHttpCookieSameSite(data.sameSite), 49 | }; 50 | return ensureKeysMatch(data, result); 51 | } 52 | 53 | function fromCoreHttpCookieSameSite( 54 | data: coreTypes.RpcHttpCookieSameSite | null | undefined 55 | ): rpc.RpcHttpCookie.SameSite | null | undefined { 56 | switch (data) { 57 | case 'explicitNone': 58 | return rpc.RpcHttpCookie.SameSite.ExplicitNone; 59 | case 'lax': 60 | return rpc.RpcHttpCookie.SameSite.Lax; 61 | case 'none': 62 | return rpc.RpcHttpCookie.SameSite.None; 63 | case 'strict': 64 | return rpc.RpcHttpCookie.SameSite.Strict; 65 | default: 66 | return handleDefaultEnumCase(data, 'CoreRpcHttpCookieSameSite'); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/coreApi/converters/handleDefaultEnumCase.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { AzFuncRangeError } from '../../errors'; 5 | 6 | export function handleDefaultEnumCase(data: undefined | null | string, typeName: string): undefined | null { 7 | switch (data) { 8 | case undefined: 9 | return undefined; 10 | case null: 11 | return null; 12 | default: 13 | throw new AzFuncRangeError(`Unexpected value "${data}" for type "${typeName}"`); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/coreApi/converters/toCoreFunctionMetadata.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { handleDefaultEnumCase } from './handleDefaultEnumCase'; 8 | import { toCoreStatusResult } from './toCoreStatusResult'; 9 | 10 | export function toCoreFunctionMetadata(data: rpc.IRpcFunctionMetadata): coreTypes.RpcFunctionMetadata { 11 | const result = { 12 | ...data, 13 | bindings: toCoreBindings(data.bindings), 14 | status: toCoreStatusResult(data.status), 15 | retryOptions: toCoreRetryOptions(data.retryOptions), 16 | }; 17 | return ensureKeysMatch(data, result); 18 | } 19 | 20 | function toCoreBindings( 21 | data: { [key: string]: rpc.IBindingInfo } | null | undefined 22 | ): { [key: string]: coreTypes.RpcBindingInfo } | null | undefined { 23 | if (data) { 24 | const result = {}; 25 | for (const [key, value] of Object.entries(data)) { 26 | result[key] = toCoreBinding(value); 27 | } 28 | return ensureKeysMatch(data, result); 29 | } else { 30 | return data; 31 | } 32 | } 33 | 34 | function toCoreBinding(data: rpc.IBindingInfo | null | undefined): coreTypes.RpcBindingInfo | null | undefined { 35 | if (data) { 36 | const result = { 37 | ...data, 38 | dataType: toCoreBindingDataType(data.dataType), 39 | direction: toCoreBindingDirection(data.direction), 40 | }; 41 | return ensureKeysMatch(data, result); 42 | } else { 43 | return data; 44 | } 45 | } 46 | 47 | function toCoreBindingDataType( 48 | data: rpc.BindingInfo.DataType | null | undefined 49 | ): coreTypes.RpcBindingDataType | null | undefined { 50 | switch (data) { 51 | case rpc.BindingInfo.DataType.binary: 52 | return 'binary'; 53 | case rpc.BindingInfo.DataType.stream: 54 | return 'stream'; 55 | case rpc.BindingInfo.DataType.string: 56 | return 'string'; 57 | case rpc.BindingInfo.DataType.undefined: 58 | return 'undefined'; 59 | default: 60 | return handleDefaultEnumCase(data, 'RpcBindingDataType'); 61 | } 62 | } 63 | 64 | function toCoreBindingDirection( 65 | data: rpc.BindingInfo.Direction | null | undefined 66 | ): coreTypes.RpcBindingDirection | null | undefined { 67 | switch (data) { 68 | case rpc.BindingInfo.Direction.in: 69 | return 'in'; 70 | case rpc.BindingInfo.Direction.inout: 71 | return 'inout'; 72 | case rpc.BindingInfo.Direction.out: 73 | return 'out'; 74 | default: 75 | return handleDefaultEnumCase(data, 'RpcBindingDirection'); 76 | } 77 | } 78 | 79 | function toCoreRetryOptions( 80 | data: rpc.IRpcRetryOptions | null | undefined 81 | ): coreTypes.RpcRetryOptions | null | undefined { 82 | if (data) { 83 | const result = { 84 | ...data, 85 | retryStrategy: toCoreRetryStrategy(data.retryStrategy), 86 | }; 87 | return ensureKeysMatch(data, result); 88 | } else { 89 | return data; 90 | } 91 | } 92 | 93 | function toCoreRetryStrategy( 94 | data: rpc.RpcRetryOptions.RetryStrategy | null | undefined 95 | ): coreTypes.RpcRetryStrategy | null | undefined { 96 | switch (data) { 97 | case rpc.RpcRetryOptions.RetryStrategy.exponential_backoff: 98 | return 'exponentialBackoff'; 99 | case rpc.RpcRetryOptions.RetryStrategy.fixed_delay: 100 | return 'fixedDelay'; 101 | default: 102 | return handleDefaultEnumCase(data, 'RpcRetryStrategy'); 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /src/coreApi/converters/toCoreInvocationRequest.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET 
Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { toCoreTypedData } from './toCoreTypedData'; 8 | 9 | export function toCoreInvocationRequest(data: rpc.IInvocationRequest): coreTypes.RpcInvocationRequest { 10 | const result = { 11 | ...data, 12 | inputData: toCoreParameterBindings(data.inputData), 13 | triggerMetadata: toCoreTriggerMetadata(data.triggerMetadata), 14 | }; 15 | return ensureKeysMatch(data, result); 16 | } 17 | 18 | function toCoreParameterBindings( 19 | data: rpc.IParameterBinding[] | null | undefined 20 | ): coreTypes.RpcParameterBinding[] | null | undefined { 21 | if (data) { 22 | return data.map(toCoreParameterBinding); 23 | } else { 24 | return data; 25 | } 26 | } 27 | 28 | function toCoreParameterBinding(data: rpc.IParameterBinding): coreTypes.RpcParameterBinding { 29 | const result = { 30 | ...data, 31 | data: toCoreTypedData(data.data), 32 | }; 33 | return ensureKeysMatch(data, result); 34 | } 35 | 36 | function toCoreTriggerMetadata( 37 | data: { [key: string]: rpc.ITypedData } | null | undefined 38 | ): { [key: string]: coreTypes.RpcTypedData } | null | undefined { 39 | if (data) { 40 | const result = {}; 41 | for (const [key, value] of Object.entries(data)) { 42 | result[key] = toCoreTypedData(value); 43 | } 44 | return ensureKeysMatch(data, result); 45 | } else { 46 | return data; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/coreApi/converters/toCoreStatusResult.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { handleDefaultEnumCase } from './handleDefaultEnumCase'; 8 | 9 | export function toCoreStatusResult( 10 | data: rpc.IStatusResult | null | undefined 11 | ): coreTypes.RpcStatusResult | null | undefined { 12 | if (data) { 13 | const result = { 14 | ...data, 15 | logs: toCoreLogs(data.logs), 16 | status: toCoreStatus(data.status), 17 | }; 18 | return ensureKeysMatch(data, result); 19 | } else { 20 | return data; 21 | } 22 | } 23 | 24 | function toCoreLogs(data: rpc.IRpcLog[] | null | undefined): coreTypes.RpcLog[] | null | undefined { 25 | if (data) { 26 | return data.map(toCoreLog); 27 | } else { 28 | return data; 29 | } 30 | } 31 | 32 | export function toCoreLog(data: rpc.IRpcLog): coreTypes.RpcLog { 33 | const result = { 34 | ...data, 35 | level: toCoreLogLevel(data.level), 36 | logCategory: toCoreLogCategory(data.logCategory), 37 | }; 38 | return ensureKeysMatch(data, result); 39 | } 40 | 41 | function toCoreLogLevel(data: rpc.RpcLog.Level | null | undefined): coreTypes.RpcLogLevel | null | undefined { 42 | switch (data) { 43 | case rpc.RpcLog.Level.Critical: 44 | return 'critical'; 45 | case rpc.RpcLog.Level.Debug: 46 | return 'debug'; 47 | case rpc.RpcLog.Level.Error: 48 | return 'error'; 49 | case rpc.RpcLog.Level.Information: 50 | return 'information'; 51 | case rpc.RpcLog.Level.None: 52 | return 'none'; 53 | case rpc.RpcLog.Level.Trace: 54 | return 'trace'; 55 | case rpc.RpcLog.Level.Warning: 56 | return 'warning'; 57 | default: 58 | return handleDefaultEnumCase(data, 'RpcLogLevel'); 59 | } 60 | } 61 | 62 | function toCoreLogCategory( 63 | data: rpc.RpcLog.RpcLogCategory | null | undefined 64 | ): coreTypes.RpcLogCategory | null | undefined { 65 | switch (data) { 66 | case rpc.RpcLog.RpcLogCategory.CustomMetric: 67 | return 'customMetric'; 68 | case rpc.RpcLog.RpcLogCategory.System: 69 | return 'system'; 70 | case rpc.RpcLog.RpcLogCategory.User: 71 | return 'user'; 72 | default: 73 | return handleDefaultEnumCase(data, 'RpcLogCategory'); 74 | } 75 | } 76 | 77 | function toCoreStatus(data: rpc.StatusResult.Status | null | undefined): coreTypes.RpcStatus | null | undefined { 78 | switch (data) { 79 | case rpc.StatusResult.Status.Cancelled: 80 | return 'cancelled'; 81 | case rpc.StatusResult.Status.Failure: 82 | return 'failure'; 83 | case rpc.StatusResult.Status.Success: 84 | return 'success'; 85 | default: 86 | return handleDefaultEnumCase(data, 'RpcStatus'); 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /src/coreApi/converters/toCoreTypedData.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ensureKeysMatch } from './ensureKeysMatch'; 7 | import { handleDefaultEnumCase } from './handleDefaultEnumCase'; 8 | 9 | export function toCoreTypedData(data: rpc.ITypedData | null | undefined): coreTypes.RpcTypedData | null | undefined { 10 | if (data) { 11 | const result = { 12 | ...data, 13 | http: toCoreHttpData(data.http), 14 | }; 15 | return ensureKeysMatch(data, result); 16 | } else { 17 | return data; 18 | } 19 | } 20 | 21 | function toCoreHttpData(data: rpc.IRpcHttp | null | undefined): coreTypes.RpcHttpData | null | undefined { 22 | if (data) { 23 | const result = { 24 | ...data, 25 | cookies: toCoreHttpCookies(data.cookies), 26 | body: toCoreTypedData(data.body), 27 | rawBody: toCoreTypedData(data.rawBody), 28 | }; 29 | return ensureKeysMatch(data, result); 30 | } else { 31 | return data; 32 | } 33 | } 34 | 35 | function toCoreHttpCookies( 36 | data: rpc.IRpcHttpCookie[] | null | undefined 37 | ): coreTypes.RpcHttpCookie[] | null | undefined { 38 | if (data) { 39 | return data.map(toCoreHttpCookie); 40 | } else { 41 | return data; 42 | } 43 | } 44 | 45 | function toCoreHttpCookie(data: rpc.IRpcHttpCookie): coreTypes.RpcHttpCookie { 46 | const result = { 47 | ...data, 48 | sameSite: toCoreHttpCookieSameSite(data.sameSite), 49 | }; 50 | return ensureKeysMatch(data, result); 51 | } 52 | 53 | function toCoreHttpCookieSameSite( 54 | data: rpc.RpcHttpCookie.SameSite | null | undefined 55 | ): coreTypes.RpcHttpCookieSameSite | null | undefined { 56 | switch (data) { 57 | case rpc.RpcHttpCookie.SameSite.ExplicitNone: 58 | return 'explicitNone'; 59 | case rpc.RpcHttpCookie.SameSite.Lax: 60 | return 'lax'; 61 | case rpc.RpcHttpCookie.SameSite.None: 62 | return 'none'; 63 | case rpc.RpcHttpCookie.SameSite.Strict: 64 | return 'strict'; 65 | default: 66 | return handleDefaultEnumCase(data, 'RpcHttpCookieSameSite'); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/coreApi/coreApiLog.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { worker } from '../WorkerContext'; 6 | import { fromCoreLogCategory, fromCoreLogLevel } from './converters/fromCoreStatusResult'; 7 | 8 | export function coreApiLog(level: coreTypes.RpcLogLevel, category: coreTypes.RpcLogCategory, message: string): void { 9 | worker.log({ 10 | message, 11 | level: fromCoreLogLevel(level), 12 | logCategory: fromCoreLogCategory(category), 13 | }); 14 | } 15 | -------------------------------------------------------------------------------- /src/coreApi/registerFunction.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
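// Registration sketch (the metadata/binding shape and callback signature are illustrative; the
// programming-model library normally calls this through the @azure/functions-core API during app
// startup, before worker indexing is locked):
//
//   const disposable = registerFunction(
//       { name: 'httpTrigger1', bindings: { req: { type: 'httpTrigger', direction: 'in' } } },
//       async (context) => { /* function body */ }
//   );
//   disposable.dispose();   // allowed only until workerIndexingLocked is set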
3 | 4 | import { FunctionCallback, FunctionMetadata } from '@azure/functions-core'; 5 | import * as path from 'path'; 6 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 7 | import { Disposable } from '../Disposable'; 8 | import { AzFuncSystemError } from '../errors'; 9 | import { worker } from '../WorkerContext'; 10 | import { fromCoreFunctionMetadata } from './converters/fromCoreFunctionMetadata'; 11 | 12 | export function registerFunction(metadata: FunctionMetadata, callback: FunctionCallback): Disposable { 13 | if (worker.app.workerIndexingLocked) { 14 | throw new AzFuncSystemError('A function can only be registered during app startup.'); 15 | } 16 | worker.app.isUsingWorkerIndexing = true; 17 | 18 | const functionId = metadata.functionId || metadata.name; 19 | if (functionId in worker.app.functions) { 20 | throw new AzFuncSystemError(`A function with id "${functionId}" has already been registered.`); 21 | } 22 | 23 | const rpcMetadata: rpc.IRpcFunctionMetadata = fromCoreFunctionMetadata(metadata); 24 | rpcMetadata.functionId = functionId; 25 | // `rawBindings` is what's actually used by the host 26 | // `bindings` is used by the js library in both the old host indexing and the new worker indexing 27 | rpcMetadata.rawBindings = Object.entries(metadata.bindings).map(([name, binding]) => { 28 | return JSON.stringify({ ...binding, name }); 29 | }); 30 | 31 | // The host validates that the `scriptFile` property is defined. Neither the host nor the worker needs it, but tooling like the portal may use it so we'll make a best guess 32 | // (The real script file may be a separate file referenced from the entry point, or it may be coming from a different entry point entirely if there are some async shenanigans) 33 | if (worker.app.currentEntryPoint) { 34 | rpcMetadata.scriptFile = path.basename(worker.app.currentEntryPoint); 35 | rpcMetadata.directory = path.dirname(worker.app.currentEntryPoint); 36 | } else { 37 | rpcMetadata.scriptFile = 'unknown'; 38 | } 39 | 40 | worker.app.functions[functionId] = { metadata: rpcMetadata, callback }; 41 | 42 | return new Disposable(() => { 43 | if (worker.app.workerIndexingLocked) { 44 | throw new AzFuncSystemError('A function can only be disposed during app startup.'); 45 | } else { 46 | delete worker.app.functions[functionId]; 47 | } 48 | }); 49 | } 50 | -------------------------------------------------------------------------------- /src/coreApi/setProgrammingModel.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
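// Shape sketch (names/values illustrative): a programming model, as used by this worker, supplies
// at least a name, a version, and getInvocationModel(), which InvocationHandler calls per invocation:
//
//   setProgrammingModel({
//       name: '@azure/functions',
//       version: '4.x',
//       getInvocationModel: (coreCtx) => myInvocationModel,   // myInvocationModel is hypothetical
//   });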
3 | 4 | import { ProgrammingModel } from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { worker } from '../WorkerContext'; 7 | import LogCategory = rpc.RpcLog.RpcLogCategory; 8 | import LogLevel = rpc.RpcLog.Level; 9 | 10 | export function setProgrammingModel(programmingModel: ProgrammingModel): void { 11 | // Log when setting the programming model, except for the initial default one (partially because the grpc channels aren't fully setup at that time) 12 | if (worker.app.programmingModel) { 13 | worker.log({ 14 | message: `Setting Node.js programming model to "${programmingModel.name}" version "${programmingModel.version}"`, 15 | level: LogLevel.Information, 16 | logCategory: LogCategory.System, 17 | }); 18 | } else { 19 | worker.defaultProgrammingModel = programmingModel; 20 | } 21 | worker.app.programmingModel = programmingModel; 22 | } 23 | -------------------------------------------------------------------------------- /src/errors.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | export interface AzFuncError { 5 | /** 6 | * System errors can be tracked in our telemetry 7 | * User errors cannot be tracked in our telemetry because they could have user information (users can still track it themselves in their app insights resource) 8 | */ 9 | isAzureFunctionsSystemError: boolean; 10 | 11 | loggedOverRpc?: boolean; 12 | } 13 | 14 | export interface ValidatedError extends Error, Partial { 15 | /** 16 | * Use `trySetErrorMessage` to set the error message 17 | */ 18 | readonly message: string; 19 | } 20 | 21 | export class AzFuncSystemError extends Error { 22 | isAzureFunctionsSystemError = true; 23 | } 24 | 25 | export class AzFuncTypeError extends TypeError implements AzFuncError { 26 | isAzureFunctionsSystemError = true; 27 | } 28 | 29 | export class AzFuncRangeError extends RangeError implements AzFuncError { 30 | isAzureFunctionsSystemError = true; 31 | } 32 | 33 | export class ReadOnlyError extends AzFuncTypeError { 34 | constructor(propertyName: string) { 35 | super(`Cannot assign to read only property '${propertyName}'`); 36 | } 37 | } 38 | 39 | export function ensureErrorType(err: unknown): ValidatedError { 40 | if (err instanceof Error) { 41 | return err; 42 | } else { 43 | let message: string; 44 | if (err === undefined || err === null) { 45 | message = 'Unknown error'; 46 | } else if (typeof err === 'string') { 47 | message = err; 48 | } else if (typeof err === 'object') { 49 | message = JSON.stringify(err); 50 | } else { 51 | message = String(err); 52 | } 53 | return new Error(message); 54 | } 55 | } 56 | 57 | export function trySetErrorMessage(err: Error, message: string): void { 58 | try { 59 | err.message = message; 60 | } catch { 61 | // If we can't set the message, we'll keep the error as is 62 | } 63 | } 64 | 65 | /** 66 | * This is mostly for callbacks where `null` or `undefined` indicates there is no error 67 | * By contrast, anything thrown/caught is assumed to be an error regardless of what it is 68 | */ 69 | export function isError(err: unknown): boolean { 70 | return err !== null && err !== undefined; 71 | } 72 | -------------------------------------------------------------------------------- /src/eventHandlers/EventHandler.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) 
.NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 5 | 6 | export type SupportedRequestName = 7 | | 'functionEnvironmentReloadRequest' 8 | | 'functionLoadRequest' 9 | | 'invocationRequest' 10 | | 'workerInitRequest' 11 | | 'functionsMetadataRequest'; 12 | export type SupportedRequest = rpc.StreamingMessage[SupportedRequestName]; 13 | 14 | export type SupportedResponseName = 15 | | 'functionEnvironmentReloadResponse' 16 | | 'functionLoadResponse' 17 | | 'invocationResponse' 18 | | 'workerInitResponse' 19 | | 'functionMetadataResponse'; 20 | export type SupportedResponse = rpc.StreamingMessage[SupportedResponseName]; 21 | 22 | export abstract class EventHandler< 23 | TRequestName extends SupportedRequestName = SupportedRequestName, 24 | TResponseName extends SupportedResponseName = SupportedResponseName, 25 | TRequest = NonNullable, 26 | TResponse = NonNullable 27 | > { 28 | abstract readonly responseName: TResponseName; 29 | 30 | /** 31 | * The default response with any properties unique to this request that should be set for both success & failure scenarios 32 | */ 33 | abstract getDefaultResponse(request: TRequest): TResponse; 34 | 35 | /** 36 | * Handles the event and returns the response 37 | * NOTE: This method does not need to set the result/status. That will be handled in code common to all event handlers 38 | */ 39 | abstract handleEvent(request: TRequest): Promise; 40 | } 41 | -------------------------------------------------------------------------------- /src/eventHandlers/FunctionEnvironmentReloadHandler.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 5 | import { startApp } from '../startApp'; 6 | import { worker } from '../WorkerContext'; 7 | import { EventHandler } from './EventHandler'; 8 | import { getWorkerCapabilities } from './getWorkerCapabilities'; 9 | import { getWorkerMetadata } from './getWorkerMetadata'; 10 | import LogCategory = rpc.RpcLog.RpcLogCategory; 11 | import LogLevel = rpc.RpcLog.Level; 12 | import CapabilitiesUpdateStrategy = rpc.FunctionEnvironmentReloadResponse.CapabilitiesUpdateStrategy; 13 | import * as path from 'path'; 14 | 15 | /** 16 | * Environment variables from the current process 17 | */ 18 | export class FunctionEnvironmentReloadHandler extends EventHandler< 19 | 'functionEnvironmentReloadRequest', 20 | 'functionEnvironmentReloadResponse' 21 | > { 22 | readonly responseName = 'functionEnvironmentReloadResponse'; 23 | 24 | getDefaultResponse(_msg: rpc.IFunctionEnvironmentReloadRequest): rpc.IFunctionEnvironmentReloadResponse { 25 | return { 26 | workerMetadata: getWorkerMetadata(), 27 | }; 28 | } 29 | 30 | async handleEvent(msg: rpc.IFunctionEnvironmentReloadRequest): Promise { 31 | if (!msg.functionAppDirectory) { 32 | worker.log({ 33 | message: `FunctionEnvironmentReload functionAppDirectory is not defined`, 34 | level: LogLevel.Debug, 35 | logCategory: LogCategory.System, 36 | }); 37 | } 38 | 39 | if ( 40 | worker.app.functionAppDirectory && 41 | msg.functionAppDirectory && 42 | isPathEqual(worker.app.functionAppDirectory, msg.functionAppDirectory) 43 | ) { 44 | worker.log({ 45 | message: `FunctionEnvironmentReload functionAppDirectory has not changed`, 46 | level: LogLevel.Debug, 47 | logCategory: LogCategory.System, 48 | }); 49 | } 50 | 51 | worker.resetApp(msg.functionAppDirectory); 52 | 53 | const response = this.getDefaultResponse(msg); 54 | 55 | // Add environment variables from incoming 56 | const numVariables = (msg.environmentVariables && Object.keys(msg.environmentVariables).length) || 0; 57 | worker.log({ 58 | message: `Reloading environment variables. Found ${numVariables} variables to reload.`, 59 | level: LogLevel.Information, 60 | logCategory: LogCategory.System, 61 | }); 62 | 63 | // reset existing env vars 64 | Object.keys(process.env).map((key) => delete process.env[key]); 65 | // set new env vars 66 | Object.assign(process.env, msg.environmentVariables); 67 | 68 | // Change current working directory 69 | if (msg.functionAppDirectory) { 70 | worker.log({ 71 | message: `Changing current working directory to ${msg.functionAppDirectory}`, 72 | level: LogLevel.Information, 73 | logCategory: LogCategory.System, 74 | }); 75 | process.chdir(msg.functionAppDirectory); 76 | await startApp(msg.functionAppDirectory); 77 | // model info may have changed, so we need to update this 78 | response.workerMetadata = getWorkerMetadata(); 79 | } 80 | 81 | response.capabilities = await getWorkerCapabilities(); 82 | response.capabilitiesUpdateStrategy = CapabilitiesUpdateStrategy.replace; 83 | 84 | return response; 85 | } 86 | } 87 | 88 | function isPathEqual(path1: string, path2: string): boolean { 89 | return path.relative(path1, path2) === ''; 90 | } 91 | -------------------------------------------------------------------------------- /src/eventHandlers/FunctionLoadHandler.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 5 | import { ensureErrorType, trySetErrorMessage } from '../errors'; 6 | import { loadLegacyFunction } from '../LegacyFunctionLoader'; 7 | import { isDefined, nonNullProp } from '../utils/nonNull'; 8 | import { worker } from '../WorkerContext'; 9 | import { EventHandler } from './EventHandler'; 10 | import LogCategory = rpc.RpcLog.RpcLogCategory; 11 | import LogLevel = rpc.RpcLog.Level; 12 | 13 | /** 14 | * Worker responds after loading required metadata to load function with the load result 15 | */ 16 | export class FunctionLoadHandler extends EventHandler<'functionLoadRequest', 'functionLoadResponse'> { 17 | readonly responseName = 'functionLoadResponse'; 18 | 19 | getDefaultResponse(msg: rpc.IFunctionLoadRequest): rpc.IFunctionLoadResponse { 20 | return { functionId: msg.functionId }; 21 | } 22 | 23 | async handleEvent(msg: rpc.IFunctionLoadRequest): Promise { 24 | worker.app.workerIndexingLocked = true; 25 | 26 | const response = this.getDefaultResponse(msg); 27 | 28 | worker.log({ 29 | message: `Worker ${worker.id} received FunctionLoadRequest`, 30 | level: LogLevel.Debug, 31 | logCategory: LogCategory.System, 32 | }); 33 | 34 | if (isDefined(worker.app.blockingAppStartError)) { 35 | throw worker.app.blockingAppStartError; 36 | } 37 | 38 | if (!worker.app.isUsingWorkerIndexing) { 39 | const functionId = nonNullProp(msg, 'functionId'); 40 | const metadata = nonNullProp(msg, 'metadata'); 41 | try { 42 | await loadLegacyFunction(functionId, metadata, worker.app.packageJson); 43 | } catch (err) { 44 | const error = ensureErrorType(err); 45 | error.isAzureFunctionsSystemError = true; 46 | const message = `Worker was unable to load function ${metadata.name}: '${error.message}'`; 47 | trySetErrorMessage(error, message); 48 | throw error; 49 | } 50 | } 51 | 52 | return response; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/eventHandlers/FunctionsMetadataHandler.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 5 | import { isDefined } from '../utils/nonNull'; 6 | import { worker } from '../WorkerContext'; 7 | import { EventHandler } from './EventHandler'; 8 | import LogCategory = rpc.RpcLog.RpcLogCategory; 9 | import LogLevel = rpc.RpcLog.Level; 10 | 11 | export class FunctionsMetadataHandler extends EventHandler<'functionsMetadataRequest', 'functionMetadataResponse'> { 12 | readonly responseName = 'functionMetadataResponse'; 13 | 14 | getDefaultResponse(_msg: rpc.IFunctionsMetadataRequest): rpc.IFunctionMetadataResponse { 15 | return { 16 | useDefaultMetadataIndexing: !worker.app.isUsingWorkerIndexing, 17 | }; 18 | } 19 | 20 | async handleEvent(msg: rpc.IFunctionsMetadataRequest): Promise { 21 | worker.app.workerIndexingLocked = true; 22 | 23 | const response = this.getDefaultResponse(msg); 24 | 25 | worker.log({ 26 | message: `Worker ${worker.id} received FunctionsMetadataRequest`, 27 | level: LogLevel.Debug, 28 | logCategory: LogCategory.System, 29 | }); 30 | 31 | if (worker.app.isUsingWorkerIndexing) { 32 | if (isDefined(worker.app.blockingAppStartError)) { 33 | throw worker.app.blockingAppStartError; 34 | } 35 | 36 | response.functionMetadataResults = Object.values(worker.app.functions).map((f) => f.metadata); 37 | } 38 | 39 | return response; 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/eventHandlers/InvocationHandler.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { 6 | HookData, 7 | InvocationState, 8 | PostInvocationContext, 9 | PreInvocationContext, 10 | ProgrammingModel, 11 | RpcFunctionMetadata, 12 | RpcInvocationRequest, 13 | RpcLogCategory, 14 | RpcLogLevel, 15 | } from '@azure/functions-core'; 16 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 17 | import { RegisteredFunction } from '../AppContext'; 18 | import { fromCoreInvocationResponse } from '../coreApi/converters/fromCoreInvocationResponse'; 19 | import { fromCoreLogCategory, fromCoreLogLevel } from '../coreApi/converters/fromCoreStatusResult'; 20 | import { toCoreFunctionMetadata } from '../coreApi/converters/toCoreFunctionMetadata'; 21 | import { toCoreInvocationRequest } from '../coreApi/converters/toCoreInvocationRequest'; 22 | import { AzFuncSystemError, isError, ReadOnlyError } from '../errors'; 23 | import { executeHooks } from '../hooks/executeHooks'; 24 | import { InvocationLogContext } from '../hooks/LogHookContext'; 25 | import { getLegacyFunction } from '../LegacyFunctionLoader'; 26 | import { nonNullProp } from '../utils/nonNull'; 27 | import { worker } from '../WorkerContext'; 28 | import { EventHandler } from './EventHandler'; 29 | 30 | /** 31 | * Host requests worker to invoke a Function 32 | */ 33 | export class InvocationHandler extends EventHandler<'invocationRequest', 'invocationResponse'> { 34 | readonly responseName = 'invocationResponse'; 35 | 36 | getDefaultResponse(msg: rpc.IInvocationRequest): rpc.IInvocationResponse { 37 | return { invocationId: msg.invocationId }; 38 | } 39 | 40 | async handleEvent(msg: rpc.IInvocationRequest): Promise { 41 | const functionId = nonNullProp(msg, 'functionId'); 42 | let registeredFunc: RegisteredFunction | undefined; 43 | if 
(worker.app.isUsingWorkerIndexing) { 44 | registeredFunc = worker.app.functions[functionId]; 45 | } else { 46 | registeredFunc = getLegacyFunction(functionId); 47 | } 48 | 49 | if (!registeredFunc) { 50 | throw new AzFuncSystemError(`Function code for '${functionId}' is not loaded and cannot be invoked.`); 51 | } 52 | 53 | let { metadata, callback } = registeredFunc; 54 | 55 | const msgCategory = `${nonNullProp(metadata, 'name')}.Invocation`; 56 | const coreCtx = new CoreInvocationContext( 57 | toCoreInvocationRequest(msg), 58 | toCoreFunctionMetadata(metadata), 59 | msgCategory 60 | ); 61 | 62 | // Log invocation details to ensure the invocation received by node worker 63 | coreCtx.log( 64 | 'debug', 65 | 'system', 66 | `Worker ${worker.id} received FunctionInvocationRequest with invocationId ${msg.invocationId}` 67 | ); 68 | 69 | const programmingModel: ProgrammingModel = nonNullProp(worker.app, 'programmingModel'); 70 | const invocModel = await programmingModel.getInvocationModel(coreCtx); 71 | 72 | const hookData: HookData = {}; 73 | let { context, inputs } = await invocModel.getArguments(); 74 | coreCtx.logContext = { hookData, invocationContext: context }; 75 | 76 | const preInvocContext: PreInvocationContext = { 77 | get hookData() { 78 | return hookData; 79 | }, 80 | set hookData(_obj) { 81 | throw new ReadOnlyError('hookData'); 82 | }, 83 | get appHookData() { 84 | return worker.app.appHookData; 85 | }, 86 | set appHookData(_obj) { 87 | throw new ReadOnlyError('appHookData'); 88 | }, 89 | get invocationContext() { 90 | return context; 91 | }, 92 | set invocationContext(_obj) { 93 | throw new ReadOnlyError('invocationContext'); 94 | }, 95 | functionCallback: callback, 96 | inputs, 97 | }; 98 | 99 | coreCtx.state = 'preInvocationHooks'; 100 | try { 101 | await executeHooks('preInvocation', preInvocContext, msg.invocationId, msgCategory); 102 | } finally { 103 | coreCtx.state = undefined; 104 | } 105 | 106 | inputs = preInvocContext.inputs; 107 | callback = preInvocContext.functionCallback; 108 | 109 | const postInvocContext: PostInvocationContext = { 110 | get hookData() { 111 | return hookData; 112 | }, 113 | set hookData(_obj) { 114 | throw new ReadOnlyError('hookData'); 115 | }, 116 | get appHookData() { 117 | return worker.app.appHookData; 118 | }, 119 | set appHookData(_obj) { 120 | throw new ReadOnlyError('appHookData'); 121 | }, 122 | get invocationContext() { 123 | return context; 124 | }, 125 | set invocationContext(_obj) { 126 | throw new ReadOnlyError('invocationContext'); 127 | }, 128 | inputs, 129 | result: null, 130 | error: null, 131 | }; 132 | 133 | coreCtx.state = 'invocation'; 134 | try { 135 | postInvocContext.result = await invocModel.invokeFunction(context, inputs, callback); 136 | } catch (err) { 137 | postInvocContext.error = err; 138 | } finally { 139 | coreCtx.state = undefined; 140 | } 141 | 142 | coreCtx.state = 'postInvocationHooks'; 143 | try { 144 | await executeHooks('postInvocation', postInvocContext, msg.invocationId, msgCategory); 145 | } finally { 146 | coreCtx.state = undefined; 147 | } 148 | 149 | if (isError(postInvocContext.error)) { 150 | throw postInvocContext.error; 151 | } 152 | 153 | return fromCoreInvocationResponse(await invocModel.getResponse(context, postInvocContext.result)); 154 | } 155 | } 156 | 157 | class CoreInvocationContext implements coreTypes.CoreInvocationContext { 158 | invocationId: string; 159 | request: RpcInvocationRequest; 160 | metadata: RpcFunctionMetadata; 161 | state?: InvocationState; 162 | logContext?: 
InvocationLogContext; 163 | #msgCategory: string; 164 | 165 | constructor(request: RpcInvocationRequest, metadata: RpcFunctionMetadata, msgCategory: string) { 166 | this.invocationId = nonNullProp(request, 'invocationId'); 167 | this.request = request; 168 | this.metadata = metadata; 169 | this.#msgCategory = msgCategory; 170 | } 171 | 172 | log(level: RpcLogLevel, logCategory: RpcLogCategory, message: string): void { 173 | worker.log( 174 | { 175 | invocationId: this.request.invocationId, 176 | category: this.#msgCategory, 177 | message, 178 | level: fromCoreLogLevel(level), 179 | logCategory: fromCoreLogCategory(logCategory), 180 | }, 181 | this.logContext 182 | ); 183 | } 184 | } 185 | -------------------------------------------------------------------------------- /src/eventHandlers/WorkerInitHandler.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { access, constants } from 'fs'; 5 | import * as path from 'path'; 6 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 7 | import { isError } from '../errors'; 8 | import { startApp } from '../startApp'; 9 | import { nonNullProp } from '../utils/nonNull'; 10 | import { worker } from '../WorkerContext'; 11 | import { EventHandler } from './EventHandler'; 12 | import { getWorkerCapabilities } from './getWorkerCapabilities'; 13 | import { getWorkerMetadata } from './getWorkerMetadata'; 14 | import LogCategory = rpc.RpcLog.RpcLogCategory; 15 | import LogLevel = rpc.RpcLog.Level; 16 | 17 | /** 18 | * Host sends capabilities/init data to worker and requests the worker to initialize itself 19 | */ 20 | export class WorkerInitHandler extends EventHandler<'workerInitRequest', 'workerInitResponse'> { 21 | readonly responseName = 'workerInitResponse'; 22 | 23 | getDefaultResponse(_msg: rpc.IWorkerInitRequest): rpc.IWorkerInitResponse { 24 | return { 25 | workerMetadata: getWorkerMetadata(), 26 | }; 27 | } 28 | 29 | async handleEvent(msg: rpc.IWorkerInitRequest): Promise { 30 | if (!msg.functionAppDirectory) { 31 | worker.log({ 32 | message: `WorkerInit functionAppDirectory is not defined`, 33 | level: LogLevel.Debug, 34 | logCategory: LogCategory.System, 35 | }); 36 | } 37 | worker.app.functionAppDirectory = msg.functionAppDirectory; 38 | 39 | const response = this.getDefaultResponse(msg); 40 | 41 | worker.log({ 42 | message: `Worker ${worker.id} received WorkerInitRequest`, 43 | level: LogLevel.Debug, 44 | logCategory: LogCategory.System, 45 | }); 46 | 47 | logColdStartWarning(); 48 | 49 | worker._hostVersion = nonNullProp(msg, 'hostVersion'); 50 | 51 | if (msg.functionAppDirectory) { 52 | await startApp(msg.functionAppDirectory); 53 | // model info may have changed, so we need to update this 54 | response.workerMetadata = getWorkerMetadata(); 55 | } 56 | 57 | response.capabilities = await getWorkerCapabilities(); 58 | 59 | return response; 60 | } 61 | } 62 | 63 | export function logColdStartWarning(delayInMs?: number): void { 64 | // On reading a js file with function code('require') NodeJs tries to find 'package.json' all the way up to the file system root. 65 | // In Azure files it causes a delay during cold start as connection to Azure Files is an expensive operation. 
66 | const scriptRoot = process.env.AzureWebJobsScriptRoot; 67 | if (process.env.WEBSITE_CONTENTAZUREFILECONNECTIONSTRING && process.env.WEBSITE_CONTENTSHARE && scriptRoot) { 68 | // Add delay to avoid affecting coldstart 69 | if (!delayInMs) { 70 | delayInMs = 5000; 71 | } 72 | setTimeout(() => { 73 | access(path.join(scriptRoot, 'package.json'), constants.F_OK, (err) => { 74 | if (isError(err)) { 75 | worker.log({ 76 | message: 77 | 'package.json is not found at the root of the Function App in Azure Files - cold start for NodeJs can be affected.', 78 | level: LogLevel.Debug, 79 | logCategory: LogCategory.System, 80 | }); 81 | } 82 | }); 83 | }, delayInMs); 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /src/eventHandlers/getWorkerCapabilities.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { WorkerCapabilities } from '@azure/functions-core'; 5 | import { worker } from '../WorkerContext'; 6 | 7 | export async function getWorkerCapabilities(): Promise { 8 | let capabilities: WorkerCapabilities = { 9 | RawHttpBodyBytes: 'true', 10 | RpcHttpTriggerMetadataRemoved: 'true', 11 | RpcHttpBodyOnly: 'true', 12 | IgnoreEmptyValuedRpcHttpHeaders: 'true', 13 | UseNullableValueDictionaryForHttp: 'true', 14 | WorkerStatus: 'true', 15 | TypedDataCollection: 'true', 16 | HandlesWorkerTerminateMessage: 'true', 17 | }; 18 | 19 | if (worker.app.programmingModel?.getCapabilities) { 20 | capabilities = await worker.app.programmingModel.getCapabilities(capabilities); 21 | } 22 | 23 | return capabilities; 24 | } 25 | -------------------------------------------------------------------------------- /src/eventHandlers/getWorkerMetadata.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 5 | import { version as workerVersion } from '../constants'; 6 | import { worker } from '../WorkerContext'; 7 | 8 | export function getWorkerMetadata(): rpc.IWorkerMetadata { 9 | const result: rpc.IWorkerMetadata = { 10 | runtimeName: 'node', 11 | runtimeVersion: process.versions.node, 12 | // analytics team wants bitness to be consistent across workers, so we have to adjust this 13 | workerBitness: process.arch === 'ia32' ? 'x86' : process.arch, 14 | workerVersion, 15 | }; 16 | if (worker.app.programmingModel) { 17 | result.customProperties = { 18 | modelName: worker.app.programmingModel.name, 19 | modelVersion: worker.app.programmingModel.version, 20 | }; 21 | } 22 | return result; 23 | } 24 | -------------------------------------------------------------------------------- /src/eventHandlers/terminateWorker.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { AppTerminateContext } from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { ReadOnlyError } from '../errors'; 7 | import { executeHooks } from '../hooks/executeHooks'; 8 | import { worker } from '../WorkerContext'; 9 | import LogCategory = rpc.RpcLog.RpcLogCategory; 10 | import LogLevel = rpc.RpcLog.Level; 11 | 12 | export async function terminateWorker(_msg: rpc.IWorkerTerminate) { 13 | worker.log({ 14 | message: 'Received workerTerminate message; gracefully shutting down worker', 15 | level: LogLevel.Debug, 16 | logCategory: LogCategory.System, 17 | }); 18 | 19 | const appTerminateContext: AppTerminateContext = { 20 | get hookData() { 21 | return worker.app.appLevelOnlyHookData; 22 | }, 23 | set hookData(_obj) { 24 | throw new ReadOnlyError('hookData'); 25 | }, 26 | get appHookData() { 27 | return worker.app.appHookData; 28 | }, 29 | set appHookData(_obj) { 30 | throw new ReadOnlyError('appHookData'); 31 | }, 32 | }; 33 | 34 | await executeHooks('appTerminate', appTerminateContext); 35 | 36 | worker.eventStream.end(); 37 | process.exit(0); 38 | } 39 | -------------------------------------------------------------------------------- /src/hooks/LogHookContext.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { HookData, RpcLogCategory, RpcLogLevel } from '@azure/functions-core'; 5 | import * as coreTypes from '@azure/functions-core'; 6 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 7 | import { toCoreLog } from '../coreApi/converters/toCoreStatusResult'; 8 | import { ReadOnlyError } from '../errors'; 9 | import { nonNullProp } from '../utils/nonNull'; 10 | import { worker } from '../WorkerContext'; 11 | 12 | export interface InvocationLogContext { 13 | hookData: HookData; 14 | invocationContext: unknown; 15 | } 16 | 17 | export class LogHookContext implements coreTypes.LogHookContext { 18 | level: RpcLogLevel; 19 | message: string; 20 | #category: RpcLogCategory; 21 | #hookData: HookData; 22 | #invocationContext: unknown; 23 | 24 | constructor(log: rpc.IRpcLog, invocLogCtx: InvocationLogContext | undefined) { 25 | const coreLog = toCoreLog(log); 26 | this.level = nonNullProp(coreLog, 'level'); 27 | this.message = nonNullProp(coreLog, 'message'); 28 | this.#category = nonNullProp(coreLog, 'logCategory'); 29 | this.#hookData = invocLogCtx?.hookData ?? 
{}; 30 | this.#invocationContext = invocLogCtx?.invocationContext; 31 | } 32 | 33 | get hookData(): HookData { 34 | return this.#hookData; 35 | } 36 | set hookData(_obj: HookData) { 37 | throw new ReadOnlyError('hookData'); 38 | } 39 | get category(): RpcLogCategory { 40 | return this.#category; 41 | } 42 | set category(_obj: RpcLogCategory) { 43 | throw new ReadOnlyError('category'); 44 | } 45 | get appHookData(): HookData { 46 | return worker.app.appHookData; 47 | } 48 | set appHookData(_obj: HookData) { 49 | throw new ReadOnlyError('appHookData'); 50 | } 51 | get invocationContext(): unknown { 52 | return this.#invocationContext; 53 | } 54 | set invocationContext(_obj: unknown) { 55 | throw new ReadOnlyError('invocationContext'); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/hooks/executeHooks.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { HookContext } from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { worker } from '../WorkerContext'; 7 | import { getHooks } from './getHooks'; 8 | import LogLevel = rpc.RpcLog.Level; 9 | import LogCategory = rpc.RpcLog.RpcLogCategory; 10 | 11 | export async function executeHooks( 12 | hookName: string, 13 | context: HookContext, 14 | invocationId?: string | null, 15 | msgCategory?: string 16 | ): Promise { 17 | const callbacks = getHooks(hookName); 18 | if (callbacks.length > 0) { 19 | worker.log({ 20 | message: `Executing ${callbacks.length} "${hookName}" hooks`, 21 | level: LogLevel.Debug, 22 | logCategory: LogCategory.System, 23 | invocationId, 24 | category: msgCategory, 25 | }); 26 | for (const callback of callbacks) { 27 | await callback(context); 28 | } 29 | worker.log({ 30 | message: `Executed "${hookName}" hooks`, 31 | level: LogLevel.Debug, 32 | logCategory: LogCategory.System, 33 | invocationId, 34 | category: msgCategory, 35 | }); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/hooks/getHooks.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { HookCallback } from '@azure/functions-core'; 5 | import { AzFuncRangeError } from '../errors'; 6 | import { worker } from '../WorkerContext'; 7 | 8 | export function getHooks(hookName: string): HookCallback[] { 9 | switch (hookName) { 10 | case 'preInvocation': 11 | return worker.app.preInvocationHooks; 12 | case 'postInvocation': 13 | return worker.app.postInvocationHooks; 14 | case 'appStart': 15 | return worker.app.appStartHooks; 16 | case 'appTerminate': 17 | return worker.app.appTerminateHooks; 18 | case 'log': 19 | return worker.app.logHooks; 20 | default: 21 | throw new AzFuncRangeError(`Unrecognized hook "${hookName}"`); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/hooks/registerHook.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { HookCallback } from '@azure/functions-core'; 5 | import { Disposable } from '../Disposable'; 6 | import { getHooks } from './getHooks'; 7 | 8 | export function registerHook(hookName: string, callback: HookCallback): Disposable { 9 | const hooks = getHooks(hookName); 10 | hooks.push(callback); 11 | return new Disposable(() => { 12 | const index = hooks.indexOf(callback); 13 | if (index > -1) { 14 | hooks.splice(index, 1); 15 | } 16 | }); 17 | } 18 | -------------------------------------------------------------------------------- /src/loadScriptFile.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as retry from 'p-retry'; 5 | import * as path from 'path'; 6 | import * as url from 'url'; 7 | import { AzureFunctionsRpcMessages as rpc } from '../azure-functions-language-worker-protobuf/src/rpc'; 8 | import { AzFuncSystemError } from './errors'; 9 | import { PackageJson } from './parsers/parsePackageJson'; 10 | import { worker } from './WorkerContext'; 11 | import LogCategory = rpc.RpcLog.RpcLogCategory; 12 | import LogLevel = rpc.RpcLog.Level; 13 | 14 | let hasLoggedAttempt = 0; 15 | let hasLoggedWarning = false; 16 | 17 | export async function loadScriptFile(filePath: string, packageJson: PackageJson): Promise { 18 | // See the following issue for more details on why we want to retry 19 | // https://github.com/Azure/azure-functions-nodejs-worker/issues/693 20 | const retries = 9; 21 | return await retry( 22 | async (currentAttempt: number) => { 23 | if (currentAttempt > 1 && currentAttempt > hasLoggedAttempt) { 24 | worker.log({ 25 | message: `Retrying file load. Attempt ${currentAttempt}/${retries + 1}`, 26 | level: LogLevel.Debug, 27 | logCategory: LogCategory.System, 28 | }); 29 | hasLoggedAttempt = currentAttempt; 30 | } 31 | return loadScriptFileInternal(filePath, packageJson); 32 | }, 33 | { 34 | retries: retries, 35 | minTimeout: 500, 36 | onFailedAttempt: (error) => { 37 | if (!/lstat.*home/i.test(error?.message || '')) { 38 | // this will abort the retries if it's an error we don't recognize 39 | throw error; 40 | } else if (error.retriesLeft > 0 && !hasLoggedWarning) { 41 | worker.log({ 42 | message: `Warning: Failed to load file with error "${error.message}"`, 43 | level: LogLevel.Warning, 44 | logCategory: LogCategory.System, 45 | }); 46 | hasLoggedWarning = true; 47 | } 48 | }, 49 | } 50 | ); 51 | } 52 | 53 | async function loadScriptFileInternal(filePath: string, packageJson: PackageJson): Promise { 54 | const start = Date.now(); 55 | try { 56 | let script: unknown; 57 | if (isESModule(filePath, packageJson)) { 58 | const fileUrl = url.pathToFileURL(filePath); 59 | if (fileUrl.href) { 60 | // use eval so it doesn't get compiled into a require() 61 | script = await eval('import(fileUrl.href)'); 62 | } else { 63 | throw new AzFuncSystemError(`'${filePath}' could not be converted to file URL (${fileUrl.href})`); 64 | } 65 | } else { 66 | script = require(/* webpackIgnore: true */ filePath); 67 | } 68 | return script; 69 | } finally { 70 | warnIfLongLoadTime(filePath, start); 71 | } 72 | } 73 | 74 | function warnIfLongLoadTime(filePath: string, start: number): void { 75 | const timeElapsed = Date.now() - start; 76 | const rfpName = 'WEBSITE_RUN_FROM_PACKAGE'; 77 | const rfpValue = process.env[rfpName]; 78 | if ( 79 | timeElapsed > 1000 && 80 | (rfpValue === undefined || rfpValue === '0') && 81 | 
process.env.AZURE_FUNCTIONS_ENVIRONMENT !== 'Development' // don't show in core tools 82 | ) { 83 | worker.log({ 84 | message: `Loading "${path.basename(filePath)}" took ${timeElapsed}ms`, 85 | level: LogLevel.Warning, 86 | logCategory: LogCategory.System, 87 | }); 88 | worker.log({ 89 | message: `Set "${rfpName}" to "1" to significantly improve load times. Learn more here: https://aka.ms/AAjon54`, 90 | level: LogLevel.Warning, 91 | logCategory: LogCategory.System, 92 | }); 93 | } 94 | } 95 | 96 | export function isESModule(filePath: string, packageJson: PackageJson): boolean { 97 | if (filePath.endsWith('.mjs')) { 98 | return true; 99 | } 100 | if (filePath.endsWith('.cjs')) { 101 | return false; 102 | } 103 | return packageJson.type === 'module'; 104 | } 105 | -------------------------------------------------------------------------------- /src/nodejsWorker.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | const logPrefix = 'LanguageWorkerConsoleLog'; 5 | const errorPrefix = logPrefix + '[error] '; 6 | const warnPrefix = logPrefix + '[warn] '; 7 | const supportedVersions: string[] = ['v14', 'v16', 'v18', 'v20', 'v22']; 8 | const devOnlyVersions: string[] = ['v15', 'v17', 'v19', 'v21']; 9 | let workerModule; 10 | 11 | // Try validating node version 12 | // NOTE: This method should be manually tested if changed as it is in a sensitive code path 13 | // and is JavaScript that runs on at least node version 0.10.28 14 | function validateNodeVersion(version: string) { 15 | let errorMessage: string | undefined; 16 | let warningMessage: string | undefined; 17 | try { 18 | const versionSplit = version.split('.'); 19 | const major = versionSplit[0]; 20 | // process.version returns invalid output 21 | if (versionSplit.length != 3) { 22 | errorMessage = "Could not parse Node.js version: '" + version + "'"; 23 | // Unsupported version note: Documentation about Node's stable versions here: https://github.com/nodejs/Release#release-plan and an explanation here: https://medium.com/swlh/understanding-how-node-releases-work-in-2018-6fd356816db4 24 | } else if (process.env.AZURE_FUNCTIONS_ENVIRONMENT == 'Development' && devOnlyVersions.indexOf(major) >= 0) { 25 | warningMessage = 26 | 'Node.js version used (' + 27 | version + 28 | ') is not officially supported. You may use it during local development, but must use an officially supported version on Azure:' + 29 | ' https://aka.ms/functions-node-versions'; 30 | } else if (supportedVersions.indexOf(major) < 0) { 31 | errorMessage = 32 | 'Incompatible Node.js version' + 33 | ' (' + 34 | version + 35 | ').' + 36 | ' Refer to our documentation to see the Node.js versions supported by each version of Azure Functions: https://aka.ms/functions-node-versions'; 37 | } 38 | // Unknown error 39 | } catch (err) { 40 | const unknownError = 'Error in validating Node.js version. 
'; 41 | console.error(errorPrefix + unknownError + err); 42 | throw unknownError + err; 43 | } 44 | // Throw error for known version errors 45 | if (errorMessage) { 46 | console.error(errorPrefix + errorMessage); 47 | throw new Error(errorMessage); 48 | } 49 | if (warningMessage) { 50 | console.warn(warnPrefix + warningMessage); 51 | } 52 | } 53 | 54 | validateNodeVersion(process.version); 55 | 56 | // Try requiring bundle 57 | try { 58 | workerModule = require('./worker-bundle.js'); 59 | workerModule = workerModule.worker; 60 | } catch (err) { 61 | console.log(logPrefix + "Couldn't require bundle, falling back to Worker.js. " + err); 62 | workerModule = require('./Worker.js'); 63 | } 64 | 65 | workerModule.startNodeWorker(process.argv); 66 | -------------------------------------------------------------------------------- /src/parsers/parsePackageJson.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { pathExists, readJson } from 'fs-extra'; 5 | import * as path from 'path'; 6 | import { AzFuncSystemError, ensureErrorType, trySetErrorMessage } from '../errors'; 7 | 8 | export interface PackageJson { 9 | type?: string; 10 | main?: string; 11 | } 12 | 13 | /** 14 | * @returns A parsed & sanitized package.json 15 | */ 16 | export async function parsePackageJson(dir: string): Promise { 17 | try { 18 | const filePath = path.join(dir, 'package.json'); 19 | if (!(await pathExists(filePath))) { 20 | throw new AzFuncSystemError('file does not exist'); 21 | } 22 | 23 | const data: unknown = await readJson(filePath); 24 | if (typeof data !== 'object' || data === null || Array.isArray(data)) { 25 | throw new AzFuncSystemError('file content is not an object'); 26 | } 27 | 28 | const stringFields = ['main', 'type']; 29 | for (const field of stringFields) { 30 | if (field in data && typeof data[field] !== 'string') { 31 | // ignore fields with an unexpected type 32 | delete data[field]; 33 | } 34 | } 35 | return data; 36 | } catch (err) { 37 | const error: Error = ensureErrorType(err); 38 | if (error.name === 'SyntaxError') { 39 | const message = `file content is not valid JSON: ${error.message}`; 40 | trySetErrorMessage(error, message); 41 | } 42 | throw error; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/setupCoreModule.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { version } from './constants'; 5 | import { coreApiLog } from './coreApi/coreApiLog'; 6 | import { registerFunction } from './coreApi/registerFunction'; 7 | import { setProgrammingModel } from './coreApi/setProgrammingModel'; 8 | import { Disposable } from './Disposable'; 9 | import { registerHook } from './hooks/registerHook'; 10 | import { worker } from './WorkerContext'; 11 | import Module = require('module'); 12 | 13 | /** 14 | * Intercepts the default "require" method so that we can provide our own "built-in" module 15 | * This module is essentially the publicly accessible API for our worker 16 | * This module is available to users only at runtime, not as an installable npm package 17 | */ 18 | export function setupCoreModule(): void { 19 | const coreApi = { 20 | version: version, 21 | get hostVersion() { 22 | return worker.hostVersion; 23 | }, 24 | registerHook, 25 | setProgrammingModel, 26 | getProgrammingModel: () => { 27 | return worker.app.programmingModel; 28 | }, 29 | log: coreApiLog, 30 | registerFunction, 31 | Disposable, 32 | }; 33 | 34 | Module.prototype.require = new Proxy(Module.prototype.require, { 35 | apply(target, thisArg, argArray) { 36 | if (argArray[0] === '@azure/functions-core') { 37 | return coreApi; 38 | } else { 39 | return Reflect.apply(target, thisArg, argArray); 40 | } 41 | }, 42 | }); 43 | 44 | // Set default programming model shipped with the worker 45 | // This has to be imported dynamically _after_ we setup the core module since it will almost certainly reference the core module 46 | // eslint-disable-next-line @typescript-eslint/no-var-requires 47 | const func: typeof import('@azure/functions') = require('@azure/functions'); 48 | func.setup(); 49 | } 50 | -------------------------------------------------------------------------------- /src/setupEventStream.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { AzureFunctionsRpcMessages as rpc } from '../azure-functions-language-worker-protobuf/src/rpc'; 5 | import { AzFuncSystemError, ensureErrorType } from './errors'; 6 | import { EventHandler, SupportedRequest } from './eventHandlers/EventHandler'; 7 | import { FunctionEnvironmentReloadHandler } from './eventHandlers/FunctionEnvironmentReloadHandler'; 8 | import { FunctionLoadHandler } from './eventHandlers/FunctionLoadHandler'; 9 | import { FunctionsMetadataHandler } from './eventHandlers/FunctionsMetadataHandler'; 10 | import { InvocationHandler } from './eventHandlers/InvocationHandler'; 11 | import { terminateWorker } from './eventHandlers/terminateWorker'; 12 | import { WorkerInitHandler } from './eventHandlers/WorkerInitHandler'; 13 | import { systemError } from './utils/Logger'; 14 | import { nonNullProp } from './utils/nonNull'; 15 | import { worker } from './WorkerContext'; 16 | import LogCategory = rpc.RpcLog.RpcLogCategory; 17 | import LogLevel = rpc.RpcLog.Level; 18 | 19 | /** 20 | * Configures handlers for incoming gRPC messages on the client 21 | * 22 | * This should have a way to handle all incoming gRPC messages. 
23 | * This includes all incoming StreamingMessage types (exclude *Response types and RpcLog type) 24 | */ 25 | export function setupEventStream(): void { 26 | worker.eventStream.on('data', (msg) => { 27 | void handleMessage(msg); 28 | }); 29 | 30 | worker.eventStream.on('error', function (err) { 31 | systemError(`Worker encountered event stream error: `, err); 32 | throw new AzFuncSystemError(err); 33 | }); 34 | 35 | // wrap event stream write to validate message correctness 36 | const oldWrite = worker.eventStream.write; 37 | worker.eventStream.write = function checkWrite(msg) { 38 | const msgError = rpc.StreamingMessage.verify(msg); 39 | if (msgError) { 40 | systemError(`Worker malformed message`, msgError); 41 | throw new AzFuncSystemError(msgError); 42 | } 43 | oldWrite.apply(worker.eventStream, [msg]); 44 | }; 45 | } 46 | 47 | async function handleMessage(inMsg: rpc.StreamingMessage): Promise { 48 | const outMsg: rpc.IStreamingMessage = { 49 | requestId: inMsg.requestId, 50 | }; 51 | 52 | let eventHandler: EventHandler | undefined; 53 | let request: SupportedRequest | undefined; 54 | try { 55 | const eventName = inMsg.content; 56 | switch (eventName) { 57 | case 'functionEnvironmentReloadRequest': 58 | eventHandler = new FunctionEnvironmentReloadHandler(); 59 | break; 60 | case 'functionLoadRequest': 61 | eventHandler = new FunctionLoadHandler(); 62 | break; 63 | case 'invocationRequest': 64 | eventHandler = new InvocationHandler(); 65 | break; 66 | case 'workerInitRequest': 67 | eventHandler = new WorkerInitHandler(); 68 | break; 69 | case 'workerTerminate': 70 | // Worker terminate request is a special request which gracefully shuts down worker 71 | // It doesn't have a response so we don't have an EventHandler class for it 72 | await terminateWorker(nonNullProp(inMsg, eventName)); 73 | return; 74 | case 'workerStatusRequest': 75 | // Worker sends the host empty response to evaluate the worker's latency 76 | // The response doesn't even allow a `result` property, which is why we don't implement an EventHandler class 77 | outMsg.workerStatusResponse = {}; 78 | worker.eventStream.write(outMsg); 79 | return; 80 | case 'functionsMetadataRequest': 81 | eventHandler = new FunctionsMetadataHandler(); 82 | break; 83 | case 'closeSharedMemoryResourcesRequest': 84 | case 'fileChangeEventRequest': 85 | case 'functionLoadRequestCollection': 86 | case 'invocationCancel': 87 | case 'startStream': 88 | case 'workerHeartbeat': 89 | // Not yet implemented 90 | return; 91 | default: 92 | throw new AzFuncSystemError(`Worker had no handler for message '${eventName}'`); 93 | } 94 | 95 | request = nonNullProp(inMsg, eventName); 96 | const response = await eventHandler.handleEvent(request); 97 | response.result = { status: rpc.StatusResult.Status.Success }; 98 | outMsg[eventHandler.responseName] = response; 99 | } catch (err) { 100 | const error = ensureErrorType(err); 101 | if (error.isAzureFunctionsSystemError && !error.loggedOverRpc) { 102 | worker.log({ 103 | message: error.message, 104 | level: LogLevel.Error, 105 | logCategory: LogCategory.System, 106 | }); 107 | } 108 | 109 | if (eventHandler && request) { 110 | const response = eventHandler.getDefaultResponse(request); 111 | response.result = { 112 | status: rpc.StatusResult.Status.Failure, 113 | exception: { 114 | message: error.message, 115 | stackTrace: error.stack, 116 | }, 117 | }; 118 | outMsg[eventHandler.responseName] = response; 119 | } 120 | } 121 | 122 | if (eventHandler) { 123 | worker.eventStream.write(outMsg); 124 | } 125 | } 126 | 
-------------------------------------------------------------------------------- /src/startApp.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { AppStartContext } from '@azure/functions-core'; 5 | import { AzureFunctionsRpcMessages as rpc } from '../azure-functions-language-worker-protobuf/src/rpc'; 6 | import { AzFuncSystemError, ensureErrorType, ReadOnlyError, trySetErrorMessage } from './errors'; 7 | import { executeHooks } from './hooks/executeHooks'; 8 | import { loadScriptFile } from './loadScriptFile'; 9 | import { parsePackageJson } from './parsers/parsePackageJson'; 10 | import { isDefined, nonNullProp } from './utils/nonNull'; 11 | import { isEnvironmentVariableSet, isNode20Plus } from './utils/util'; 12 | import { worker } from './WorkerContext'; 13 | import globby = require('globby'); 14 | import path = require('path'); 15 | import LogLevel = rpc.RpcLog.Level; 16 | import LogCategory = rpc.RpcLog.RpcLogCategory; 17 | 18 | /** 19 | * Starting an app can happen in two places, depending on if the worker was specialized or not 20 | * 1. The worker can start in "normal" mode, meaning `workerInitRequest` will reference the user's app 21 | * 2. The worker can start in "placeholder" mode, meaning `workerInitRequest` will reference a dummy app to "warm up" the worker and `functionEnvironmentReloadRequest` will be sent with the user's actual app. 22 | * This process is called worker specialization and it helps with cold start times. 23 | * The dummy app should never have actual startup code, so it should be safe to call `startApp` twice in this case 24 | * Worker specialization happens only once, so we don't need to worry about cleaning up resources from previous `functionEnvironmentReloadRequest`s. 25 | */ 26 | export async function startApp(functionAppDirectory: string): Promise { 27 | await updatePackageJson(functionAppDirectory); 28 | await loadEntryPointFile(functionAppDirectory); 29 | const appStartContext: AppStartContext = { 30 | get hookData() { 31 | return worker.app.appLevelOnlyHookData; 32 | }, 33 | set hookData(_obj) { 34 | throw new ReadOnlyError('hookData'); 35 | }, 36 | get appHookData() { 37 | return worker.app.appHookData; 38 | }, 39 | set appHookData(_obj) { 40 | throw new ReadOnlyError('appHookData'); 41 | }, 42 | functionAppDirectory, 43 | }; 44 | await executeHooks('appStart', appStartContext); 45 | } 46 | 47 | async function updatePackageJson(functionAppDirectory: string): Promise { 48 | try { 49 | worker.app.packageJson = await parsePackageJson(functionAppDirectory); 50 | } catch (err) { 51 | const error = ensureErrorType(err); 52 | worker.log({ 53 | message: `Worker failed to load package.json: ${error.message}`, 54 | level: LogLevel.Warning, 55 | logCategory: LogCategory.System, 56 | }); 57 | worker.app.packageJson = {}; 58 | } 59 | } 60 | 61 | async function loadEntryPointFile(functionAppDirectory: string): Promise { 62 | const entryPointPattern = worker.app.packageJson.main; 63 | if (entryPointPattern) { 64 | let currentFile: string | undefined = undefined; 65 | try { 66 | const files = await globby(entryPointPattern, { cwd: functionAppDirectory }); 67 | if (files.length === 0) { 68 | let message: string = globby.hasMagic(entryPointPattern, { cwd: functionAppDirectory }) 69 | ? 
'Found zero files matching the supplied pattern' 70 | : 'File does not exist'; 71 | 72 | if (entryPointPattern === 'index.js') { 73 | // This is by far the most common error and typically happens by accident, so we'll give these folks a little more help 74 | message += '. Learn more here: https://aka.ms/AAla7et'; 75 | } 76 | 77 | throw new AzFuncSystemError(message); 78 | } 79 | 80 | for (const file of files) { 81 | currentFile = file; 82 | worker.log({ 83 | message: `Loading entry point file "${file}"`, 84 | level: LogLevel.Debug, 85 | logCategory: LogCategory.System, 86 | }); 87 | try { 88 | const entryPointFilePath = path.join(functionAppDirectory, file); 89 | worker.app.currentEntryPoint = entryPointFilePath; 90 | await loadScriptFile(entryPointFilePath, worker.app.packageJson); 91 | } finally { 92 | worker.app.currentEntryPoint = undefined; 93 | } 94 | worker.log({ 95 | message: `Loaded entry point file "${file}"`, 96 | level: LogLevel.Debug, 97 | logCategory: LogCategory.System, 98 | }); 99 | } 100 | } catch (err) { 101 | const error = ensureErrorType(err); 102 | const newMessage = `Worker was unable to load entry point "${currentFile || entryPointPattern}": ${ 103 | error.message 104 | }`; 105 | 106 | if (shouldBlockOnEntryPointError()) { 107 | trySetErrorMessage(error, newMessage); 108 | error.isAzureFunctionsSystemError = true; 109 | // We don't want to throw this error now (during workerInit or funcEnvReload) because technically the worker is fine 110 | // Instead, it will be thrown during functionMetadata or functionLoad response which better indicates that the user's app is the problem 111 | worker.app.blockingAppStartError = error; 112 | // This will ensure the error makes it to the user's app insights 113 | console.error(error.stack); 114 | } else { 115 | // In this case, the error will never block the app 116 | // The most we can do without breaking backwards compatibility is log it as an rpc system log below 117 | } 118 | 119 | // Always log as rpc system log, which goes to our internal telemetry 120 | worker.log({ 121 | message: newMessage, 122 | level: LogLevel.Error, 123 | logCategory: LogCategory.System, 124 | }); 125 | error.loggedOverRpc = true; 126 | } 127 | } 128 | } 129 | 130 | function shouldBlockOnEntryPointError(): boolean { 131 | if (isNode20Plus()) { 132 | // Starting with Node 20, this will always be blocking 133 | // https://github.com/Azure/azure-functions-nodejs-worker/issues/697 134 | return true; 135 | } else { 136 | const key = 'FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR'; 137 | if (isDefined(process.env[key])) { 138 | return isEnvironmentVariableSet(process.env[key]); 139 | } else { 140 | // We think this should be a blocking error by default, but v3 can't do that for backwards compatibility reasons 141 | // https://github.com/Azure/azure-functions-nodejs-worker/issues/630 142 | const model = nonNullProp(worker.app, 'programmingModel'); 143 | return !(model.name === '@azure/functions' && model.version.startsWith('3.')); 144 | } 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /src/utils/Logger.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | /** 5 | * Use these methods only if you want to guarantee the messages reach the host despite potential performance impact. 
6 | * Otherwise, please stick to utilizing the gRPC channel to propagate these messages with category: RpcLogCategory.System 7 | **/ 8 | 9 | const logPrefix = 'LanguageWorkerConsoleLog'; 10 | 11 | export function systemLog(message?: any, ...optionalParams: any[]) { 12 | console.log(logPrefix + removeNewLines(message), ...optionalParams); 13 | } 14 | 15 | export function systemWarn(message?: any, ...optionalParams: any[]) { 16 | console.warn(logPrefix + '[warn] ' + removeNewLines(message), ...optionalParams); 17 | } 18 | 19 | export function systemError(message?: any, ...optionalParams: any[]) { 20 | console.error(logPrefix + '[error] ' + removeNewLines(message), ...optionalParams); 21 | } 22 | 23 | function removeNewLines(message?: any): string { 24 | if (message && typeof message === 'string') { 25 | message = message.replace(/(\r\n|\n|\r)/gm, ' '); 26 | } 27 | return message; 28 | } 29 | -------------------------------------------------------------------------------- /src/utils/blockedMonitor.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { AzureFunctionsRpcMessages as rpc } from './../../azure-functions-language-worker-protobuf/src/rpc'; 5 | import LogCategory = rpc.RpcLog.RpcLogCategory; 6 | import LogLevel = rpc.RpcLog.Level; 7 | import blockedAt = require('blocked-at'); 8 | 9 | export function startBlockedMonitor( 10 | worker: { log: (log: rpc.IRpcLog) => void }, 11 | threshold = 500, 12 | interval = 10000 13 | ): NodeJS.Timer { 14 | function logBlockedWarning(message: string) { 15 | worker.log({ 16 | message, 17 | level: LogLevel.Warning, 18 | logCategory: LogCategory.System, 19 | }); 20 | } 21 | 22 | logBlockedWarning( 23 | `Monitoring for blocking code is turned on, with a threshold of ${threshold} ms. This will have a negative impact on performance. Adjust "AZURE_FUNCTIONS_NODE_BLOCK_LOG" to turn it off. ` + 24 | 'IMPORTANT NOTE: The stack traces are only an approximation and you should analyze all synchronous operations' 25 | ); 26 | 27 | let blockedHistory: { time: string; duration: number; stack: string[] }[] = []; 28 | 29 | //threshold - minimum milliseconds of blockage to report. 30 | //other parameters are default, more details on https://github.com/naugtur/blocked-at. 31 | blockedAt( 32 | (ms, stack) => { 33 | const date = new Date(); 34 | blockedHistory.push({ time: date.toISOString(), duration: ms, stack: stack }); 35 | }, 36 | { threshold: threshold } 37 | ); 38 | 39 | // Log blockedHistory every 10 seconds if it's not empty 40 | return setInterval(() => { 41 | if (blockedHistory.length > 0) { 42 | logBlockedWarning(`Blocking code monitoring history: ${JSON.stringify(blockedHistory)}`); 43 | blockedHistory = []; 44 | } 45 | }, interval); 46 | } 47 | -------------------------------------------------------------------------------- /src/utils/delay.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | export async function delay(ms: number): Promise<void> { 5 | await new Promise<void>((resolve) => setTimeout(resolve, ms)); 6 | } 7 | -------------------------------------------------------------------------------- /src/utils/nonNull.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved.
2 | // Licensed under the MIT License. 3 | 4 | import { AzFuncSystemError } from '../errors'; 5 | 6 | /** 7 | * Retrieves a property by name from an object and checks that it's not null and not undefined. It is strongly typed 8 | * for the property and will give a compile error if the given name is not a property of the source. 9 | */ 10 | export function nonNullProp<TSource, TKey extends keyof TSource>( 11 | source: TSource, 12 | name: TKey 13 | ): NonNullable<TSource[TKey]> { 14 | const value: NonNullable<TSource[TKey]> = <NonNullable<TSource[TKey]>>source[name]; 15 | return nonNullValue(value, <string>name); 16 | } 17 | 18 | /** 19 | * Validates that a given value is not null and not undefined. 20 | */ 21 | export function nonNullValue<T>(value: T | undefined, propertyNameOrMessage?: string): T { 22 | if (value === null || value === undefined) { 23 | throw new AzFuncSystemError( 24 | 'Internal error: Expected value to be neither null nor undefined' + 25 | (propertyNameOrMessage ? `: ${propertyNameOrMessage}` : '') 26 | ); 27 | } 28 | 29 | return value; 30 | } 31 | 32 | export function isDefined<T>(data: T | undefined | null): data is T { 33 | return data !== null && data !== undefined; 34 | } 35 | -------------------------------------------------------------------------------- /src/utils/util.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as semver from 'semver'; 5 | 6 | export function isEnvironmentVariableSet(val: string | boolean | number | undefined | null): boolean { 7 | return !/^(false|0)?$/i.test(val === undefined || val === null ? '' : String(val)); 8 | } 9 | 10 | export function isNode20Plus(): boolean { 11 | return semver.gte(process.versions.node, '20.0.0'); 12 | } 13 | -------------------------------------------------------------------------------- /test/Worker.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import 'mocha'; 5 | import { expect } from 'chai'; 6 | import { startNodeWorker } from '../src/Worker'; 7 | 8 | describe('Worker', () => { 9 | it('throws error on incorrect args: grpcMaxMessageLength 0', () => { 10 | const args = [ 11 | '/node', 12 | 'nodejsWorker.js', 13 | '--functions-uri', 14 | 'http://127.0.0.1:58870/', 15 | '--functions-worker-id', 16 | 'bd2e3e80-46ba', 17 | '--functions-request-id', 18 | 'bd2e3e80-46ba', 19 | '--functions-grpc-max-message-length', 20 | '0', 21 | ]; 22 | expect(() => { 23 | startNodeWorker(args); 24 | }).to.throw("gRPC client connection info is missing or incorrect ('functions-grpc-max-message-length' is 0)."); 25 | }); 26 | 27 | it('throws error on incorrect args: grpcMaxMessageLength 0 and null requestId', () => { 28 | const args = [ 29 | '/node', 30 | 'nodejsWorker.js', 31 | '--functions-uri', 32 | 'http://127.0.0.1:58870/', 33 | '--functions-worker-id', 34 | 'bd2e3e80-46ba', 35 | '--functions-grpc-max-message-length', 36 | '0', 37 | ]; 38 | expect(() => { 39 | startNodeWorker(args); 40 | }).to.throw( 41 | "gRPC client connection info is missing or incorrect ('functions-request-id' is undefined, 'functions-grpc-max-message-length' is 0)." 42 | ); 43 | }); 44 | }); 45 | -------------------------------------------------------------------------------- /test/blockMonitorTest.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved.
2 | // Licensed under the MIT License. 3 | 4 | import 'mocha'; 5 | import { expect } from 'chai'; 6 | import { AzureFunctionsRpcMessages as rpc } from './../azure-functions-language-worker-protobuf/src/rpc'; 7 | import { startBlockedMonitor } from './../src/utils/blockedMonitor'; 8 | import LogLevel = rpc.RpcLog.Level; 9 | 10 | describe('Event loop blocking operation monitor', () => { 11 | it('startBlockMonitor logs warning', async () => { 12 | console.log('start ' + new Date().getSeconds() + ':' + new Date().getMilliseconds()); 13 | let timer: NodeJS.Timer | null = null; 14 | let isTimerDestroyed = false; 15 | const logFun = function (log: rpc.IRpcLog): void { 16 | expect(log.level).to.equal(LogLevel.Warning); 17 | if (log.message && log.message.startsWith('Blocking code monitoring history')) { 18 | if (timer) { 19 | clearInterval(timer); 20 | isTimerDestroyed = true; 21 | } 22 | } 23 | }; 24 | 25 | timer = startBlockedMonitor({ log: logFun }, 100, 100); 26 | await new Promise((resolve) => { 27 | //Adding new event to event loop to start monitoring 28 | setTimeout(() => { 29 | resolve(true); 30 | }, 1); 31 | }); 32 | const end = Date.now() + 500; 33 | while (Date.now() < end) {} // blocking code 34 | 35 | await new Promise((resolve) => { 36 | //assert 37 | setTimeout(() => { 38 | if (isTimerDestroyed) { 39 | resolve(true); 40 | } 41 | }, 500); 42 | }); 43 | }); 44 | }); 45 | -------------------------------------------------------------------------------- /test/errors.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import 'mocha'; 5 | import { expect } from 'chai'; 6 | import { ensureErrorType, trySetErrorMessage } from '../src/errors'; 7 | 8 | describe('errors', () => { 9 | it('null', () => { 10 | validateError(ensureErrorType(null), 'Unknown error'); 11 | }); 12 | 13 | it('undefined', () => { 14 | validateError(ensureErrorType(undefined), 'Unknown error'); 15 | }); 16 | 17 | it('boolean', () => { 18 | validateError(ensureErrorType(true), 'true'); 19 | validateError(ensureErrorType(false), 'false'); 20 | }); 21 | 22 | it('number', () => { 23 | validateError(ensureErrorType(5), '5'); 24 | }); 25 | 26 | it('string', () => { 27 | validateError(ensureErrorType('test'), 'test'); 28 | validateError(ensureErrorType(' '), ' '); 29 | validateError(ensureErrorType(''), ''); 30 | }); 31 | 32 | it('object', () => { 33 | validateError(ensureErrorType({ test: '2' }), '{"test":"2"}'); 34 | }); 35 | 36 | it('array', () => { 37 | validateError(ensureErrorType([1, 2]), '[1,2]'); 38 | }); 39 | 40 | it('error', () => { 41 | const actualError = new Error('test2'); // Should return the original error instance, so don't use validateError which is more of a "deep" equal 42 | expect(ensureErrorType(actualError)).to.equal(actualError); 43 | }); 44 | 45 | it('modify error message', () => { 46 | const actualError = new Error('test2'); 47 | trySetErrorMessage(actualError, 'modified message'); 48 | 49 | expect(actualError.message).to.equal('modified message'); 50 | }); 51 | 52 | it('readonly error', () => { 53 | class ReadOnlyError extends Error { 54 | get message(): string { 55 | return 'a readonly message'; 56 | } 57 | } 58 | 59 | const actualError = new ReadOnlyError(); 60 | 61 | // @ts-expect-error: create a function to test that writing throws an exception 62 | expect(() => (actualError.message = 'exception')).to.throw(); 63 | 64 | const wrappedError = 
ensureErrorType(actualError); 65 | const message = 'Readonly error has not been modified'; 66 | trySetErrorMessage(wrappedError, message); 67 | 68 | expect(wrappedError.message).to.equal('a readonly message'); 69 | expect(wrappedError.stack).to.not.contain('Readonly error has been modified'); 70 | }); 71 | 72 | function validateError(actual: Error, expectedMessage: string): void { 73 | expect(actual).to.be.instanceof(Error); 74 | expect(actual.message).to.equal(expectedMessage); 75 | } 76 | }); 77 | -------------------------------------------------------------------------------- /test/eventHandlers/FunctionLoadHandler.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import 'mocha'; 5 | import { expect } from 'chai'; 6 | import * as fs from 'fs/promises'; 7 | import * as path from 'path'; 8 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 9 | import { getLegacyFunction } from '../../src/LegacyFunctionLoader'; 10 | import { delay } from '../../src/utils/delay'; 11 | import { nonNullValue } from '../../src/utils/nonNull'; 12 | import { worker } from '../../src/WorkerContext'; 13 | import { beforeEventHandlerSuite } from './beforeEventHandlerSuite'; 14 | import { msg } from './msg'; 15 | import { tempFile, testAppSrcPath } from './testAppUtils'; 16 | import { RegExpStreamingMessage, TestEventStream } from './TestEventStream'; 17 | 18 | describe('FunctionLoadHandler', () => { 19 | let stream: TestEventStream; 20 | 21 | before(() => { 22 | stream = beforeEventHandlerSuite(); 23 | }); 24 | 25 | afterEach(async () => { 26 | await stream.afterEachEventHandlerTest(); 27 | }); 28 | 29 | it('responds to function load', async () => { 30 | stream.addTestMessage(msg.funcLoad.request('helloWorld.js')); 31 | await stream.assertCalledWith(msg.funcLoad.receivedRequestLog, msg.funcLoad.response); 32 | expect(Object.keys(worker.app.legacyFunctions).length).to.equal(1); 33 | }); 34 | 35 | it('handles function load exception', async () => { 36 | stream.addTestMessage(msg.funcLoad.request('throwError.js')); 37 | 38 | const message = "Worker was unable to load function testFuncName: 'test'"; 39 | 40 | await stream.assertCalledWith( 41 | msg.funcLoad.receivedRequestLog, 42 | msg.errorLog(message), 43 | msg.funcLoad.failedResponse(message) 44 | ); 45 | }); 46 | 47 | it('handles transient lstat function load exception', async function (this: Mocha.ITestCallbackContext): Promise { 48 | // https://github.com/Azure/azure-functions-nodejs-worker/issues/693 49 | 50 | this.timeout(15 * 1000); 51 | 52 | await fs.writeFile( 53 | path.join(testAppSrcPath, tempFile), 54 | `if (Date.now() < ${Date.now() + 5 * 1000}) 55 | { 56 | throw new Error("UNKNOWN: unknown error, lstat 'D:\\\\home'"); 57 | } else { 58 | module.exports = async () => { } 59 | }` 60 | ); 61 | 62 | stream.addTestMessage(msg.funcLoad.request(tempFile)); 63 | 64 | const errorMessage = "UNKNOWN: unknown error, lstat 'D:\\home'"; 65 | const msgs: (rpc.IStreamingMessage | RegExpStreamingMessage)[] = [ 66 | msg.funcLoad.receivedRequestLog, 67 | msg.warningLog(`Warning: Failed to load file with error "${errorMessage}"`), 68 | ]; 69 | for (let i = 2; i <= 5; i++) { 70 | msgs.push(msg.debugLog(`Retrying file load. 
Attempt ${i}/10`)); 71 | } 72 | msgs.push(msg.funcLoad.response); 73 | 74 | await delay(8 * 1000); 75 | 76 | await stream.assertCalledWith(...msgs); 77 | }); 78 | 79 | it('throws unable to determine function entry point', async () => { 80 | stream.addTestMessage(msg.funcLoad.request('doNothing.js')); 81 | 82 | const message = 83 | "Worker was unable to load function testFuncName: 'Unable to determine function entry point. If multiple functions are exported, you must indicate the entry point, either by naming it 'run' or 'index', or by naming it explicitly via the 'entryPoint' metadata property.'"; 84 | 85 | await stream.assertCalledWith( 86 | msg.funcLoad.receivedRequestLog, 87 | msg.errorLog(message), 88 | msg.funcLoad.failedResponse(message) 89 | ); 90 | }); 91 | 92 | it('throws unable to determine function entry point with entryPoint name', async () => { 93 | stream.addTestMessage(msg.funcLoad.request('doNothing.js', { entryPoint: 'wrongEntryPoint' })); 94 | 95 | const message = 96 | "Worker was unable to load function testFuncName: 'Unable to determine function entry point: wrongEntryPoint. If multiple functions are exported, you must indicate the entry point, either by naming it 'run' or 'index', or by naming it explicitly via the 'entryPoint' metadata property.'"; 97 | 98 | await stream.assertCalledWith( 99 | msg.funcLoad.receivedRequestLog, 100 | msg.errorLog(message), 101 | msg.funcLoad.failedResponse(message) 102 | ); 103 | }); 104 | 105 | it('does not load proxy function', async () => { 106 | stream.addTestMessage(msg.funcLoad.request('doNothing.js', { isProxy: true })); 107 | 108 | await stream.assertCalledWith(msg.funcLoad.receivedRequestLog, msg.funcLoad.response); 109 | 110 | expect(Object.keys(worker.app.legacyFunctions).length).to.equal(0); 111 | }); 112 | 113 | it('throws the resolved entry point is not a function', async () => { 114 | stream.addTestMessage(msg.funcLoad.request('moduleNotAFunction.js', { entryPoint: 'test' })); 115 | 116 | const message = 117 | "Worker was unable to load function testFuncName: 'The resolved entry point is not a function and cannot be invoked by the functions runtime. Make sure the function has been correctly exported.'"; 118 | 119 | await stream.assertCalledWith( 120 | msg.funcLoad.receivedRequestLog, 121 | msg.errorLog(message), 122 | msg.funcLoad.failedResponse(message) 123 | ); 124 | }); 125 | 126 | it("function returned is a clone so that it can't affect other executions", async () => { 127 | stream.addTestMessage(msg.funcLoad.request('helloWorld.js')); 128 | 129 | await stream.assertCalledWith(msg.funcLoad.receivedRequestLog, msg.funcLoad.response); 130 | 131 | const userFunction = nonNullValue(getLegacyFunction('testFuncId')).callback; 132 | Object.assign(userFunction, { hello: 'world' }); 133 | 134 | const userFunction2 = nonNullValue(getLegacyFunction('testFuncId')).callback; 135 | 136 | expect(userFunction).to.not.equal(userFunction2); 137 | expect(userFunction['hello']).to.equal('world'); 138 | expect(userFunction2['hello']).to.be.undefined; 139 | }); 140 | }); 141 | -------------------------------------------------------------------------------- /test/eventHandlers/TestEventStream.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { expect } from 'chai'; 5 | import { EventEmitter } from 'events'; 6 | import * as fs from 'fs/promises'; 7 | import * as path from 'path'; 8 | import * as sinon from 'sinon'; 9 | import { AzureFunctionsRpcMessages as rpc } from '../../azure-functions-language-worker-protobuf/src/rpc'; 10 | import { IEventStream } from '../../src/GrpcClient'; 11 | import { worker } from '../../src/WorkerContext'; 12 | import { testAppSrcPath, testPackageJsonPath } from './testAppUtils'; 13 | 14 | export class TestEventStream extends EventEmitter implements IEventStream { 15 | originalEnv: NodeJS.ProcessEnv; 16 | originalCwd: string; 17 | written: sinon.SinonSpy; 18 | constructor() { 19 | super(); 20 | this.written = sinon.spy(); 21 | this.originalEnv = { ...process.env }; 22 | this.originalCwd = process.cwd(); 23 | } 24 | write(message: rpc.IStreamingMessage) { 25 | this.written(message); 26 | } 27 | end(): void {} 28 | 29 | addTestMessage(msg: rpc.IStreamingMessage) { 30 | this.emit('data', rpc.StreamingMessage.create(msg)); 31 | } 32 | 33 | /** 34 | * Waits up to a second for the expected number of messages to be written and then validates those messages 35 | */ 36 | async assertCalledWith(...expectedMsgs: (rpc.IStreamingMessage | RegExpStreamingMessage)[]): Promise<void> { 37 | try { 38 | // Wait for up to a second for the expected number of messages to come in 39 | const maxTime = Date.now() + 1000; 40 | const interval = 10; 41 | while (this.written.getCalls().length < expectedMsgs.length && Date.now() < maxTime) { 42 | await new Promise((resolve) => setTimeout(resolve, interval)); 43 | } 44 | 45 | const calls = this.written.getCalls(); 46 | 47 | // First, validate the "shortened" form of the messages. This will result in a more readable error for most test failures 48 | if ( 49 | !expectedMsgs.find((m) => m instanceof RegExpStreamingMessage) || 50 | calls.length !== expectedMsgs.length 51 | ) { 52 | // shortened message won't work if it's a regexp 53 | // but if the call count doesn't match, this error will be better than the one below 54 | const shortExpectedMsgs = expectedMsgs.map(getShortenedMsg); 55 | const shortActualMsgs = calls.map((c) => getShortenedMsg(c.args[0])); 56 | expect(shortActualMsgs).to.deep.equal(shortExpectedMsgs); 57 | } 58 | 59 | // Next, do a more comprehensive check on the messages 60 | expect(calls.length).to.equal( 61 | expectedMsgs.length, 62 | 'Message count does not match. This may be caused by the previous test writing extraneous messages.' 
63 | ); 64 | for (let i = 0; i < expectedMsgs.length; i++) { 65 | const call = calls[i]; 66 | expect(call.args).to.have.length(1); 67 | const actualMsg = convertHttpResponse(call.args[0]); 68 | 69 | let expectedMsg = expectedMsgs[i]; 70 | if (expectedMsg instanceof RegExpStreamingMessage) { 71 | expectedMsg.validateRegExpProps(actualMsg); 72 | expectedMsg = expectedMsg.expectedMsg; 73 | } 74 | expectedMsg = convertHttpResponse(expectedMsg); 75 | 76 | expect(actualMsg).to.deep.equal(expectedMsg); 77 | } 78 | } finally { 79 | this.written.resetHistory(); 80 | } 81 | } 82 | 83 | /** 84 | * Verifies the test didn't send any extraneous messages 85 | */ 86 | async afterEachEventHandlerTest(): Promise<void> { 87 | // Reset `process.env` and process.cwd() after each test so it doesn't affect other tests 88 | process.chdir(this.originalCwd); 89 | for (const key of Object.keys(process.env)) { 90 | if (!(key in this.originalEnv)) { 91 | delete process.env[key]; 92 | } 93 | } 94 | Object.assign(process.env, this.originalEnv); 95 | 96 | // Reset require cache for test app files, otherwise they're only ever loaded once 97 | const files = await fs.readdir(testAppSrcPath); 98 | for (const file of files) { 99 | delete require.cache[require.resolve(path.join(testAppSrcPath, file))]; 100 | } 101 | 102 | await fs.writeFile(testPackageJsonPath, '{}'); 103 | 104 | worker._hostVersion = undefined; 105 | worker.resetApp(this.originalCwd); 106 | 107 | // minor delay so that it's more likely extraneous messages are associated with this test as opposed to leaking into the next test 108 | await new Promise((resolve) => setTimeout(resolve, 20)); 109 | await this.assertCalledWith(); 110 | } 111 | } 112 | 113 | function getShortenedMsg(msg: rpc.IStreamingMessage | RegExpStreamingMessage): string { 114 | msg = msg instanceof RegExpStreamingMessage ? msg.expectedMsg : msg; 115 | if (msg.rpcLog?.message) { 116 | return msg.rpcLog.message; 117 | } else { 118 | for (const [k, v] of Object.entries(msg)) { 119 | // only interested in response messages 120 | if (/response/i.test(k)) { 121 | let result: string; 122 | let errorMsg: string | undefined; 123 | switch (v.result?.status) { 124 | case rpc.StatusResult.Status.Success: 125 | result = 'success'; 126 | break; 127 | case rpc.StatusResult.Status.Failure: 128 | result = 'failed'; 129 | errorMsg = v.result.exception?.message; 130 | break; 131 | case rpc.StatusResult.Status.Cancelled: 132 | result = 'cancelled'; 133 | break; 134 | default: 135 | result = 'unknown'; 136 | break; 137 | } 138 | let shortMsg = `Message: "${k}". 
Result: "${result}"`; 139 | if (errorMsg) { 140 | shortMsg += ` Error: "${errorMsg}"`; 141 | } 142 | return shortMsg; 143 | } 144 | } 145 | } 146 | return 'Unknown message'; 147 | } 148 | 149 | /** 150 | * Converts the `HttpResponse` object in any invocation response message to a simpler object that's easier to verify with `deep.equal` 151 | */ 152 | function convertHttpResponse(msg: rpc.IStreamingMessage): rpc.IStreamingMessage { 153 | if (msg.invocationResponse?.outputData) { 154 | for (const entry of msg.invocationResponse.outputData) { 155 | if (entry.data?.http) { 156 | const res = entry.data.http; 157 | entry.data.http = { 158 | body: res.body, 159 | cookies: res.cookies, 160 | headers: res.headers, 161 | statusCode: res.statusCode?.toString(), 162 | }; 163 | } 164 | } 165 | } 166 | return msg; 167 | } 168 | 169 | export type RegExpProps = { [keyPath: string]: RegExp }; 170 | 171 | /** 172 | * Allows you to use regular expressions to validate properties of the message instead of just deep equal 173 | */ 174 | export class RegExpStreamingMessage { 175 | expectedMsg: rpc.IStreamingMessage; 176 | #regExpProps: RegExpProps; 177 | 178 | constructor(expectedMsg: rpc.IStreamingMessage, regExpProps: RegExpProps) { 179 | this.expectedMsg = expectedMsg; 180 | this.#regExpProps = regExpProps; 181 | } 182 | 183 | validateRegExpProps(actualMsg: rpc.IStreamingMessage) { 184 | for (const [keyPath, regExp] of Object.entries(this.#regExpProps)) { 185 | let lastKey: string = keyPath; 186 | let lastObject: {} = actualMsg; 187 | let value: unknown = actualMsg; 188 | for (const subpath of keyPath.split('.')) { 189 | if (typeof value === 'object' && value !== null) { 190 | lastKey = subpath; 191 | lastObject = value; 192 | value = value[subpath]; 193 | } else { 194 | break; 195 | } 196 | } 197 | expect(value).to.match(regExp); 198 | 199 | delete lastObject[lastKey]; 200 | } 201 | } 202 | } 203 | -------------------------------------------------------------------------------- /test/eventHandlers/WorkerStatusHandler.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import 'mocha'; 5 | import { beforeEventHandlerSuite } from './beforeEventHandlerSuite'; 6 | import { TestEventStream } from './TestEventStream'; 7 | 8 | describe('WorkerStatusHandler', () => { 9 | let stream: TestEventStream; 10 | 11 | before(() => { 12 | stream = beforeEventHandlerSuite(); 13 | }); 14 | 15 | afterEach(async () => { 16 | await stream.afterEachEventHandlerTest(); 17 | }); 18 | 19 | it('responds to worker status', async () => { 20 | stream.addTestMessage({ 21 | requestId: 'testReqId', 22 | workerStatusRequest: {}, 23 | }); 24 | await stream.assertCalledWith({ 25 | requestId: 'testReqId', 26 | workerStatusResponse: {}, 27 | }); 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /test/eventHandlers/beforeEventHandlerSuite.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 
3 | 4 | import { setupCoreModule } from '../../src/setupCoreModule'; 5 | import { setupEventStream } from '../../src/setupEventStream'; 6 | import { worker } from '../../src/WorkerContext'; 7 | import { TestEventStream } from './TestEventStream'; 8 | 9 | let testEventStream: TestEventStream | undefined; 10 | 11 | export function beforeEventHandlerSuite() { 12 | if (!testEventStream) { 13 | worker.id = '00000000-0000-0000-0000-000000000000'; 14 | testEventStream = new TestEventStream(); 15 | worker.eventStream = testEventStream; 16 | setupEventStream(); 17 | setupCoreModule(); 18 | // Clear out logs that happened during setup, so that they don't affect whichever test runs first 19 | testEventStream.written.resetHistory(); 20 | } 21 | return testEventStream; 22 | } 23 | -------------------------------------------------------------------------------- /test/eventHandlers/terminateWorker.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as coreTypes from '@azure/functions-core'; 5 | import { expect } from 'chai'; 6 | import { worker } from '../../src/WorkerContext'; 7 | import { beforeEventHandlerSuite } from './beforeEventHandlerSuite'; 8 | import { msg } from './msg'; 9 | import { TestEventStream } from './TestEventStream'; 10 | import sinon = require('sinon'); 11 | 12 | describe('terminateWorker', () => { 13 | let stream: TestEventStream; 14 | let processExitStub: sinon.SinonStub; 15 | let streamEndStub: sinon.SinonStub; 16 | let coreApi: typeof coreTypes; 17 | 18 | before(async () => { 19 | stream = beforeEventHandlerSuite(); 20 | processExitStub = sinon.stub(process, 'exit'); 21 | streamEndStub = sinon.stub(worker.eventStream, 'end'); 22 | coreApi = await import('@azure/functions-core'); 23 | }); 24 | 25 | afterEach(async () => { 26 | processExitStub.resetHistory(); 27 | streamEndStub.resetHistory(); 28 | await stream.afterEachEventHandlerTest(); 29 | }); 30 | 31 | after(() => { 32 | processExitStub.restore(); 33 | }); 34 | 35 | it('handles worker_terminate request', async () => { 36 | stream.addTestMessage(msg.terminate.request()); 37 | await stream.assertCalledWith(msg.terminate.receivedWorkerTerminateLog); 38 | }); 39 | 40 | it('ends event stream', async () => { 41 | stream.addTestMessage(msg.terminate.request()); 42 | await stream.assertCalledWith(msg.terminate.receivedWorkerTerminateLog); 43 | expect(streamEndStub.callCount).to.be.equal(1); 44 | }); 45 | 46 | it('shuts down worker process', async () => { 47 | stream.addTestMessage(msg.terminate.request()); 48 | await stream.assertCalledWith(msg.terminate.receivedWorkerTerminateLog); 49 | expect(processExitStub.calledWith(0)).to.be.true; 50 | }); 51 | 52 | it('runs app terminate hooks', async () => { 53 | const expectedContext: coreTypes.AppTerminateContext = { 54 | hookData: {}, 55 | appHookData: {}, 56 | }; 57 | const hookFunc = sinon.spy(); 58 | coreApi.registerHook('appTerminate', hookFunc); 59 | 60 | stream.addTestMessage(msg.terminate.request()); 61 | await stream.assertCalledWith( 62 | msg.terminate.receivedWorkerTerminateLog, 63 | msg.executingAppHooksLog(1, 'appTerminate'), 64 | msg.executedAppHooksLog('appTerminate') 65 | ); 66 | expect(hookFunc.callCount).to.be.equal(1); 67 | expect(hookFunc.args[0][0]).to.deep.equal(expectedContext); 68 | }); 69 | 70 | it('allows app terminate hooks to share data', async () => { 71 | let hookData = ''; 72 | 
coreApi.registerHook('appTerminate', (context) => { 73 | context.hookData.hello = 'world'; 74 | context.appHookData.foo = 'bar'; 75 | hookData += 'term1'; 76 | }); 77 | coreApi.registerHook('appTerminate', (context) => { 78 | expect(context.hookData.hello).to.equal('world'); 79 | expect(context.appHookData.foo).to.equal('bar'); 80 | hookData += 'term2'; 81 | }); 82 | 83 | stream.addTestMessage(msg.terminate.request()); 84 | await stream.assertCalledWith( 85 | msg.terminate.receivedWorkerTerminateLog, 86 | msg.executingAppHooksLog(2, 'appTerminate'), 87 | msg.executedAppHooksLog('appTerminate') 88 | ); 89 | expect(hookData).to.equal('term1term2'); 90 | }); 91 | 92 | it('allows app start and app terminate hooks to share data', async () => { 93 | let hookData = ''; 94 | coreApi.registerHook('appStart', (context) => { 95 | context.hookData.hello = 'world'; 96 | context.appHookData.foo = 'bar'; 97 | hookData += 'start'; 98 | }); 99 | coreApi.registerHook('appTerminate', (context) => { 100 | expect(context.hookData.hello).to.equal('world'); 101 | expect(context.appHookData.foo).to.equal('bar'); 102 | hookData += 'term'; 103 | }); 104 | 105 | stream.addTestMessage(msg.init.request()); 106 | await stream.assertCalledWith( 107 | msg.init.receivedRequestLog, 108 | msg.noPackageJsonWarning, 109 | msg.executingAppHooksLog(1, 'appStart'), 110 | msg.executedAppHooksLog('appStart'), 111 | msg.init.response 112 | ); 113 | 114 | stream.addTestMessage(msg.terminate.request()); 115 | await stream.assertCalledWith( 116 | msg.terminate.receivedWorkerTerminateLog, 117 | msg.executingAppHooksLog(1, 'appTerminate'), 118 | msg.executedAppHooksLog('appTerminate') 119 | ); 120 | 121 | expect(hookData).to.equal('startterm'); 122 | }); 123 | 124 | it('enforces readonly property of hookData and appHookData in hook contexts', async () => { 125 | coreApi.registerHook('appTerminate', (context) => { 126 | expect(() => { 127 | // @ts-expect-error: setting readonly property 128 | context.hookData = { 129 | hello: 'world', 130 | }; 131 | }).to.throw(`Cannot assign to read only property 'hookData'`); 132 | expect(() => { 133 | // @ts-expect-error: setting readonly property 134 | context.appHookData = { 135 | hello: 'world', 136 | }; 137 | }).to.throw(`Cannot assign to read only property 'appHookData'`); 138 | }); 139 | 140 | stream.addTestMessage(msg.terminate.request()); 141 | 142 | await stream.assertCalledWith( 143 | msg.terminate.receivedWorkerTerminateLog, 144 | msg.executingAppHooksLog(1, 'appTerminate'), 145 | msg.executedAppHooksLog('appTerminate') 146 | ); 147 | }); 148 | }); 149 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/.gitignore: -------------------------------------------------------------------------------- 1 | package.json 2 | src/temp.js -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/doNothing.cjs: -------------------------------------------------------------------------------- 1 | // do nothing 2 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/doNothing.js: -------------------------------------------------------------------------------- 1 | // do nothing 2 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/doNothing.mjs: -------------------------------------------------------------------------------- 1 | // do nothing 2 | 
-------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/doNothing2.js: -------------------------------------------------------------------------------- 1 | // do nothing 2 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/helloWorld.js: -------------------------------------------------------------------------------- 1 | module.exports = async (context, req) => { 2 | return { 3 | body: 'Hello, world!', 4 | }; 5 | }; 6 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/longLoad.js: -------------------------------------------------------------------------------- 1 | const start = Date.now(); 2 | while (Date.now() < start + 1001) {} 3 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/moduleNotAFunction.js: -------------------------------------------------------------------------------- 1 | module.exports = { test: {} }; 2 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/moduleWithThis.js: -------------------------------------------------------------------------------- 1 | class FuncObject { 2 | prop = 'testThisProp'; 3 | constructor() {} 4 | async test(context) { 5 | context.log(`This value: "${this.prop}"`); 6 | } 7 | } 8 | 9 | module.exports = new FuncObject(); 10 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/registerAppStartHook.js: -------------------------------------------------------------------------------- 1 | const func = require('@azure/functions-core'); 2 | func.registerHook('appStart', () => {}); 3 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/registerFunction.js: -------------------------------------------------------------------------------- 1 | const func = require('@azure/functions-core'); 2 | func.registerFunction({ name: 'testFunc', bindings: [] }, () => {}); 3 | -------------------------------------------------------------------------------- /test/eventHandlers/testApp/src/throwError.js: -------------------------------------------------------------------------------- 1 | throw new Error('test'); 2 | -------------------------------------------------------------------------------- /test/eventHandlers/testAppUtils.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import * as fs from 'fs/promises'; 5 | import * as path from 'path'; 6 | 7 | export const tempFile = 'temp.js'; 8 | export const testAppPath = path.join(__dirname, 'testApp'); 9 | export const testAppSrcPath = path.join(testAppPath, 'src'); 10 | export const testPackageJsonPath = path.join(testAppPath, 'package.json'); 11 | 12 | export async function setTestAppMainField(fileName: string): Promise<string> { 13 | const fileSubpath = `src/${fileName}`; 14 | await fs.writeFile(testPackageJsonPath, JSON.stringify({ main: fileSubpath })); 15 | return fileSubpath; 16 | } 17 | -------------------------------------------------------------------------------- /test/loadScriptFile.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 
2 | // Licensed under the MIT License. 3 | 4 | import 'mocha'; 5 | import { expect } from 'chai'; 6 | import { isESModule } from '../src/loadScriptFile'; 7 | 8 | describe('loadScriptFile', () => { 9 | it('respects .cjs extension', () => { 10 | const result = isESModule('test.cjs', { 11 | type: 'module', 12 | }); 13 | expect(result).to.be.false; 14 | }); 15 | 16 | it('respects .mjs extension', () => { 17 | const result = isESModule('test.mjs', { 18 | type: 'commonjs', 19 | }); 20 | expect(result).to.be.true; 21 | }); 22 | 23 | it('respects package.json module type', () => { 24 | const result = isESModule('test.js', { 25 | type: 'module', 26 | }); 27 | expect(result).to.be.true; 28 | }); 29 | 30 | it('defaults to using commonjs', () => { 31 | expect(isESModule('test.js', {})).to.be.false; 32 | expect( 33 | isESModule('test.js', { 34 | type: 'commonjs', 35 | }) 36 | ).to.be.false; 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /test/mochaReporterOptions.json: -------------------------------------------------------------------------------- 1 | { 2 | "reporterEnabled": "spec, mocha-junit-reporter", 3 | "mochaJunitReporterReporterOptions": { 4 | "mochaFile": "test/unit-test-results.xml" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /test/parsers/parsePackageJson.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import 'mocha'; 5 | import * as chai from 'chai'; 6 | import { expect } from 'chai'; 7 | import * as chaiAsPromised from 'chai-as-promised'; 8 | import * as mockFs from 'mock-fs'; 9 | import { parsePackageJson } from '../../src/parsers/parsePackageJson'; 10 | 11 | chai.use(chaiAsPromised); 12 | 13 | describe('parsePackageJson', () => { 14 | const testDir = 'testDir'; 15 | 16 | afterEach(async () => { 17 | mockFs.restore(); 18 | }); 19 | 20 | it('normal', async () => { 21 | mockFs({ [testDir]: { 'package.json': '{ "main": "index.js", "type": "commonjs" }' } }); 22 | await expect(parsePackageJson(testDir)).to.eventually.deep.equal({ main: 'index.js', type: 'commonjs' }); 23 | }); 24 | 25 | it('invalid type', async () => { 26 | mockFs({ [testDir]: { 'package.json': '{ "main": "index.js", "type": {} }' } }); 27 | await expect(parsePackageJson(testDir)).to.eventually.deep.equal({ main: 'index.js' }); 28 | }); 29 | 30 | it('invalid main', async () => { 31 | mockFs({ [testDir]: { 'package.json': '{ "main": 55, "type": "commonjs" }' } }); 32 | await expect(parsePackageJson(testDir)).to.eventually.deep.equal({ type: 'commonjs' }); 33 | }); 34 | 35 | it('missing file', async () => { 36 | await expect(parsePackageJson(testDir)).to.be.rejectedWith('file does not exist'); 37 | }); 38 | 39 | it('empty', async () => { 40 | mockFs({ [testDir]: { 'package.json': '' } }); 41 | await expect(parsePackageJson(testDir)).to.be.rejectedWith(/^file content is not valid JSON:/); 42 | }); 43 | 44 | it('missing bracket', async () => { 45 | mockFs({ [testDir]: { 'package.json': '{' } }); 46 | await expect(parsePackageJson(testDir)).to.be.rejectedWith(/^file content is not valid JSON:/); 47 | }); 48 | 49 | it('null', async () => { 50 | mockFs({ [testDir]: { 'package.json': 'null' } }); 51 | await expect(parsePackageJson(testDir)).to.be.rejectedWith('file content is not an object'); 52 | }); 53 | 54 | it('array', async () => { 55 | mockFs({ [testDir]: { 'package.json': 
'[]' } }); 56 | await expect(parsePackageJson(testDir)).to.be.rejectedWith('file content is not an object'); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /test/startApp.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) .NET Foundation. All rights reserved. 2 | // Licensed under the MIT License. 3 | 4 | import { IHookCallbackContext, ITestCallbackContext } from 'mocha'; 5 | import { isNode20Plus } from '../src/utils/util'; 6 | import { worker } from '../src/WorkerContext'; 7 | import { beforeEventHandlerSuite } from './eventHandlers/beforeEventHandlerSuite'; 8 | import { msg } from './eventHandlers/msg'; 9 | import { setTestAppMainField, testAppPath } from './eventHandlers/testAppUtils'; 10 | import { TestEventStream } from './eventHandlers/TestEventStream'; 11 | 12 | describe('startApp', () => { 13 | let stream: TestEventStream; 14 | 15 | before(async () => { 16 | stream = beforeEventHandlerSuite(); 17 | }); 18 | 19 | afterEach(async () => { 20 | await stream.afterEachEventHandlerTest(); 21 | }); 22 | 23 | async function verifyAppStartSucceedsAndLogsError( 24 | isModelV4: boolean, 25 | fileSubpath?: string, 26 | errorMessage?: string 27 | ): Promise<void> { 28 | fileSubpath ||= await setTestAppMainField('throwError.js'); 29 | errorMessage ||= `Worker was unable to load entry point "${fileSubpath}": test`; 30 | 31 | stream.addTestMessage(msg.init.request(testAppPath)); 32 | if (fileSubpath.includes('missing')) { 33 | await stream.assertCalledWith(msg.init.receivedRequestLog, msg.errorLog(errorMessage), msg.init.response); 34 | } else { 35 | await stream.assertCalledWith( 36 | msg.init.receivedRequestLog, 37 | msg.loadingEntryPoint(fileSubpath), 38 | msg.errorLog(errorMessage), 39 | msg.init.response 40 | ); 41 | } 42 | 43 | stream.addTestMessage(msg.indexing.request); 44 | await stream.assertCalledWith(msg.indexing.receivedRequestLog, msg.indexing.response([], !isModelV4)); 45 | 46 | stream.addTestMessage(msg.funcLoad.request('helloWorld.js')); 47 | await stream.assertCalledWith(msg.funcLoad.receivedRequestLog, msg.funcLoad.response); 48 | } 49 | 50 | async function verifyAppStartFails(isModelV4: boolean, fileSubpath?: string, errorMessage?: string): Promise<void> { 51 | fileSubpath ||= await setTestAppMainField('throwError.js'); 52 | errorMessage ||= `Worker was unable to load entry point "${fileSubpath}": test`; 53 | 54 | stream.addTestMessage(msg.init.request(testAppPath)); 55 | if (fileSubpath.includes('missing')) { 56 | await stream.assertCalledWith(msg.init.receivedRequestLog, msg.errorLog(errorMessage), msg.init.response); 57 | } else { 58 | await stream.assertCalledWith( 59 | msg.init.receivedRequestLog, 60 | msg.loadingEntryPoint(fileSubpath), 61 | msg.errorLog(errorMessage), 62 | msg.init.response 63 | ); 64 | } 65 | 66 | stream.addTestMessage(msg.indexing.request); 67 | if (isModelV4) { 68 | await stream.assertCalledWith( 69 | msg.indexing.receivedRequestLog, 70 | msg.indexing.failedResponse(errorMessage, false) 71 | ); 72 | } else { 73 | await stream.assertCalledWith(msg.indexing.receivedRequestLog, msg.indexing.response([], true)); 74 | } 75 | 76 | stream.addTestMessage(msg.funcLoad.request('helloWorld.js')); 77 | await stream.assertCalledWith(msg.funcLoad.receivedRequestLog, msg.funcLoad.failedResponse(errorMessage)); 78 | } 79 | 80 | describe('Node >=v20', () => { 81 | before(function (this: IHookCallbackContext) { 82 | if (!isNode20Plus()) { 83 | this.skip(); 84 | } 85 
| }); 86 | 87 | it('Fails for missing entry point file', async () => { 88 | const fileSubpath = await setTestAppMainField('missing.js'); 89 | const message = `Worker was unable to load entry point "${fileSubpath}": File does not exist`; 90 | await verifyAppStartFails(false, fileSubpath, message); 91 | }); 92 | 93 | it('Fails for missing entry point glob pattern', async () => { 94 | const fileSubpath = await setTestAppMainField('missing/*.js'); 95 | const message = `Worker was unable to load entry point "${fileSubpath}": Found zero files matching the supplied pattern`; 96 | await verifyAppStartFails(false, fileSubpath, message); 97 | }); 98 | 99 | it('fails (v3)', async function (this: ITestCallbackContext) { 100 | await verifyAppStartFails(false); 101 | }); 102 | 103 | it('fails (v3) (app setting=0)', async function (this: ITestCallbackContext) { 104 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '0'; 105 | await verifyAppStartFails(false); 106 | }); 107 | 108 | it('fails (v3) (app setting=1)', async () => { 109 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '1'; 110 | await verifyAppStartFails(false); 111 | }); 112 | 113 | it('fails (v4)', async () => { 114 | worker.app.programmingModel = { name: '@azure/functions', version: '4.0.0' }; 115 | worker.app.isUsingWorkerIndexing = true; 116 | await verifyAppStartFails(true); 117 | }); 118 | 119 | it('fails (v4) (app setting=0)', async () => { 120 | worker.app.programmingModel = { name: '@azure/functions', version: '4.0.0' }; 121 | worker.app.isUsingWorkerIndexing = true; 122 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '0'; 123 | await verifyAppStartFails(true); 124 | }); 125 | 126 | it('fails (v4) (app setting=1)', async () => { 127 | worker.app.programmingModel = { name: '@azure/functions', version: '4.0.0' }; 128 | worker.app.isUsingWorkerIndexing = true; 129 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '1'; 130 | await verifyAppStartFails(true); 131 | }); 132 | }); 133 | 134 | describe('Node <v20', () => { 135 | before(function (this: IHookCallbackContext) { 136 | if (isNode20Plus()) { 137 | this.skip(); 138 | } 139 | }); 140 | 141 | it('Logs error for missing entry point file', async () => { 142 | const fileSubpath = await setTestAppMainField('missing.js'); 143 | const message = `Worker was unable to load entry point "${fileSubpath}": File does not exist`; 144 | await verifyAppStartSucceedsAndLogsError(false, fileSubpath, message); 145 | }); 146 | 147 | it('Logs error for missing entry point glob pattern', async () => { 148 | const fileSubpath = await setTestAppMainField('missing/*.js'); 149 | const message = `Worker was unable to load entry point "${fileSubpath}": Found zero files matching the supplied pattern`; 150 | await verifyAppStartSucceedsAndLogsError(false, fileSubpath, message); 151 | }); 152 | 153 | it('succeeds but still logs error (v3)', async function (this: ITestCallbackContext) { 154 | await verifyAppStartSucceedsAndLogsError(false); 155 | }); 156 | 157 | it('succeeds but still logs error (v3) (app setting=0)', async function (this: ITestCallbackContext) { 158 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '0'; 159 | await verifyAppStartSucceedsAndLogsError(false); 160 | }); 161 | 162 | it('fails (v3) (app setting=1)', async () => { 163 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '1'; 164 | 165 | await verifyAppStartFails(false); 166 | }); 167 | 168 | it('fails (v4)', async () => { 169 | worker.app.programmingModel = { name: '@azure/functions', version: '4.0.0' }; 
170 | worker.app.isUsingWorkerIndexing = true; 171 | 172 | await verifyAppStartFails(true); 173 | }); 174 | 175 | it('succeeds but still logs error (v4) (app setting=0)', async () => { 176 | worker.app.programmingModel = { name: '@azure/functions', version: '4.0.0' }; 177 | worker.app.isUsingWorkerIndexing = true; 178 | 179 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '0'; 180 | await verifyAppStartSucceedsAndLogsError(true); 181 | }); 182 | 183 | it('fails (v4) (app setting=1)', async () => { 184 | worker.app.programmingModel = { name: '@azure/functions', version: '4.0.0' }; 185 | worker.app.isUsingWorkerIndexing = true; 186 | process.env.FUNCTIONS_NODE_BLOCK_ON_ENTRY_POINT_ERROR = '1'; 187 | 188 | await verifyAppStartFails(true); 189 | }); 190 | }); 191 | }); 192 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es6", 5 | "noImplicitAny": false, 6 | "strict": true, 7 | "noUnusedLocals": true, 8 | "outDir": "dist", 9 | "sourceMap": true, 10 | "baseUrl": "./", 11 | "paths": { 12 | "@azure/functions-core": ["types-core"] 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | entry: './dist/src/Worker.js', 3 | output: { 4 | path: `${__dirname}/dist/src`, 5 | filename: 'worker-bundle.js', 6 | library: 'worker', 7 | libraryTarget: 'commonjs2', 8 | }, 9 | target: 'node', 10 | node: { 11 | __dirname: false, 12 | }, 13 | externals: { 14 | '@azure/functions-core': 'commonjs2 @azure/functions-core', 15 | }, 16 | module: { 17 | parser: { 18 | javascript: { 19 | commonjsMagicComments: true, 20 | }, 21 | }, 22 | }, 23 | plugins: [], 24 | }; 25 | -------------------------------------------------------------------------------- /worker.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": { 3 | "language": "node", 4 | "extensions": [".js", ".mjs", ".cjs"], 5 | "defaultExecutablePath": "node", 6 | "defaultWorkerPath": "dist/src/nodejsWorker.js", 7 | "workerIndexing": "true" 8 | }, 9 | "processOptions": { 10 | "initializationTimeout": "00:02:00", 11 | "environmentReloadTimeout": "00:02:00" 12 | } 13 | } 14 | --------------------------------------------------------------------------------