├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── dependency-submission.yml │ ├── nodejs.yml │ ├── publish-dotnet.yml │ ├── publish-extension.yml │ ├── publish-java.yml │ ├── publish-node.yml │ ├── publish-python.yml │ ├── publish-ruby.yml │ ├── python.yml │ ├── repolinter.yml │ └── ruby.yml ├── .gitignore ├── CONTRIBUTING.md ├── Dockerfile.ecrImage ├── LICENSE ├── Makefile ├── README.md ├── THIRD_PARTY_NOTICES.md ├── create-buckets.sh ├── dockerfiles ├── Dockerfile.java11 ├── Dockerfile.java17 ├── Dockerfile.java21 ├── Dockerfile.java8al2 ├── Dockerfile.nodejs18 ├── Dockerfile.nodejs20 ├── Dockerfile.nodejs22 ├── Dockerfile.ruby32 ├── Dockerfile.ruby33 └── Dockerfile.ruby34 ├── dotnet └── publish-layers.sh ├── extension └── publish-layer.sh ├── java ├── .dockerignore ├── .gitignore ├── build.gradle ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── publish-layers.sh ├── settings.gradle └── src │ ├── main │ └── java │ │ └── com │ │ └── newrelic │ │ └── java │ │ ├── HandlerWrapper.java │ │ └── JavaClassLoader.java │ └── test │ └── java │ └── com │ └── newrelic │ └── java │ ├── BiFunctionHandlerWithObjectInput.java │ ├── BiFunctionHandlerWithStringInput.java │ ├── FunctionHandlerWithObjectInput.java │ ├── FunctionHandlerWithStringInput.java │ ├── HandlerWrapperTest.java │ ├── Input.java │ ├── JavaClassLoaderTest.java │ ├── PojoHandlerWithNoArgument.java │ ├── RequestHandlerWithObjectInput.java │ ├── RequestHandlerWithStringInput.java │ └── TestStreamingRequestHandler.java ├── libBuild.sh ├── nodejs ├── .c8rc.json ├── .eslintignore ├── .eslintrc ├── .gitignore ├── .npmrc-ci ├── esm.mjs ├── index.js ├── package-lock.json ├── package.json ├── publish-layers.sh └── test │ ├── integration │ ├── cjs │ │ ├── handler.js │ │ ├── index.tap.js │ │ └── package.json │ ├── esm │ │ ├── .eslintrc.json │ │ ├── handler.js │ │ ├── index.tap.js │ │ └── package.json │ ├── helpers.cjs │ └── serverless.yml │ └── unit │ ├── cjsErrorStates.tap.js │ ├── cjsHandler.tap.js │ ├── esmErrorStates.tap.js │ ├── esmHandler.tap.js │ ├── fixtures │ ├── cjs │ │ ├── badRequire.js │ │ ├── errors.js │ │ └── handler.js │ └── esm │ │ ├── .eslintrc.json │ │ ├── badImport.mjs │ │ ├── errors.mjs │ │ └── handler.mjs │ ├── legacyEsmErrorStates.tap.js │ └── legacyEsmHandler.tap.js ├── python ├── .gitignore ├── newrelic_lambda │ ├── __init__.py │ ├── agent_protocol.py │ ├── cloudwatch_logging.py │ ├── event-sources.json │ └── lambda_handler.py ├── newrelic_lambda_wrapper.py ├── package-lock.json ├── package.json ├── publish-layers.sh └── tests │ ├── conftest.py │ ├── serverless.yml │ ├── src │ └── handler.py │ └── test_lambda_handler.py └── ruby ├── .gitignore ├── .rubocop.yml ├── Gemfile ├── README.md ├── Rakefile ├── bin ├── clean └── setup ├── newrelic_lambda_wrapper.rb ├── publish-layers.sh └── test ├── lambda_wrapper_integration_test.rb ├── lambda_wrapper_parsing_test.rb └── support ├── package-lock.json ├── package.json ├── serverless.yml └── src └── handler.rb /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Bug report \U0001F41B" 3 | about: Describe a scenario in which this project behaves unexpectedly 4 | title: '' 5 | labels: bug, needs-triage 6 | assignees: '' 7 | 8 | --- 9 | 10 | [NOTE]: # ( ^^ Provide a general summary of the issue in the title above. ^^ ) 11 | 12 | ## Description 13 | 14 | [NOTE]: # ( Describe the problem you're encountering. 
) 15 | [TIP]: # ( Do NOT give us access or passwords to your New Relic account or API keys! ) 16 | 17 | ## Steps to Reproduce 18 | 19 | [NOTE]: # ( Please be as specific as possible. ) 20 | 21 | ## Expected Behaviour 22 | 23 | [NOTE]: # ( Tell us what you expected to happen. ) 24 | 25 | ## Relevant Logs / Console output 26 | 27 | [NOTE]: # ( Please provide specifics of the local error logs, Browser Dev Tools console, etc. if appropriate and possible. ) 28 | 29 | ## Your Environment 30 | 31 | [TIP]: # ( Include as many relevant details about your environment as possible. ) 32 | 33 | * ex: Browser name and version: 34 | * ex: Operating System and version: 35 | 36 | ## Additional context 37 | 38 | [TIP]: # ( Add any other context about the problem here. ) 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: feature-request, needs-triage 6 | assignees: '' 7 | priority: '' 8 | --- 9 | 10 | ## Is your feature request related to a problem? Please describe. 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | ## Feature Description 14 | A clear and concise description of the feature you want or need. 15 | 16 | ## Describe Alternatives 17 | A clear and concise description of any alternative solutions or features you've considered. Are there examples you could link us to? 18 | 19 | ## Additional context 20 | Add any other context here. 21 | 22 | ## Priority 23 | Please help us better understand this feature request by choosing a priority from the following options: 24 | [Nice to Have, Really Want, Must Have, Blocker] 25 | -------------------------------------------------------------------------------- /.github/workflows/dependency-submission.yml: -------------------------------------------------------------------------------- 1 | # Workflow: Dependency Graph Submission and Vulnerability Reporting 2 | # 3 | # Trigger: This workflow runs on every merge to the master branch. 4 | # 5 | # Purpose: It generates and submits a dependency graph for the java layer to the GitHub Dependency Submission API. The graph is used to 6 | # trigger Dependabot Alerts for vulnerable dependencies, and to populate the Dependency Graph insights view in GitHub. 
7 | # 8 | # Includes: 9 | # - Java layer 10 | # 11 | # Excludes: 12 | # - Test-only dependencies 13 | 14 | 15 | name: Dependency Submission 16 | 17 | on: 18 | push: 19 | branches: [ 'master' ] 20 | 21 | permissions: 22 | contents: write 23 | 24 | jobs: 25 | dependency-submission: 26 | runs-on: ubuntu-latest 27 | steps: 28 | - name: Checkout Sources 29 | uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # pin@v4 30 | - name: Setup Java 31 | uses: actions/setup-java@b36c23c0d998641eff861008f374ee103c25ac73 # pin@v4 32 | with: 33 | distribution: 'temurin' 34 | java-version: '11' 35 | - name: Generate and Submit Dependency Graph 36 | uses: gradle/actions/dependency-submission@d156388eb19639ec20ade50009f3d199ce1e2808 # pin@v4 37 | with: 38 | gradle-version: 8.6 39 | build-root-directory: 'java' 40 | dependency-graph-exclude-configurations: '.*[Tt]est(Compile|Runtime)Classpath' 41 | build-scan-publish: true 42 | build-scan-terms-of-use-url: "https://gralde.com/help/legal-terms-of-use" 43 | build-scan-terms-of-use-agree: "yes" 44 | -------------------------------------------------------------------------------- /.github/workflows/nodejs.yml: -------------------------------------------------------------------------------- 1 | name: Node.js Layer CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | paths: 7 | - 'nodejs/**' 8 | pull_request: 9 | paths: 10 | - 'nodejs/**' 11 | workflow_dispatch: 12 | 13 | jobs: 14 | 15 | lint: 16 | runs-on: ubuntu-latest 17 | 18 | strategy: 19 | matrix: 20 | node-version: [lts/*] 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | - name: Use Node.js ${{ matrix.node-version }} 25 | uses: actions/setup-node@v4 26 | with: 27 | node-version: ${{ matrix.node-version }} 28 | - name: Install Dependencies 29 | run: npm ci 30 | working-directory: nodejs 31 | - name: Run Linting 32 | run: npm run lint 33 | working-directory: nodejs 34 | 35 | unit: 36 | runs-on: ubuntu-latest 37 | 38 | strategy: 39 | matrix: 40 | node-version: [18.x, 20.x, 22.x] 41 | steps: 42 | - uses: actions/checkout@v4 43 | - name: Use Node.js ${{ matrix.node-version }} 44 | uses: actions/setup-node@v4 45 | with: 46 | node-version: ${{ matrix.node-version }} 47 | - name: Install Dependencies 48 | run: npm ci 49 | working-directory: nodejs 50 | - name: Run Unit Tests 51 | run: npm run test:unit 52 | working-directory: nodejs 53 | - name: Archive Unit Test Coverage 54 | uses: actions/upload-artifact@v4 55 | with: 56 | name: unit-tests-${{ matrix.node-version }} 57 | path: ./nodejs/coverage/unit/lcov.info 58 | 59 | integration: 60 | runs-on: ubuntu-latest 61 | 62 | strategy: 63 | matrix: 64 | node-version: [18.x, 20.x, 22.x] 65 | 66 | steps: 67 | - uses: actions/checkout@v4 68 | - name: Use Node.js ${{ matrix.node-version }} 69 | uses: actions/setup-node@v4 70 | with: 71 | node-version: ${{ matrix.node-version }} 72 | - name: Install Dependencies 73 | run: npm ci 74 | working-directory: nodejs 75 | - name: Run Integration Tests 76 | run: npm run test:integration 77 | working-directory: nodejs 78 | env: 79 | NODE_RUNTIME: nodejs${{ matrix.node-version }} 80 | -------------------------------------------------------------------------------- /.github/workflows/publish-dotnet.yml: -------------------------------------------------------------------------------- 1 | name: Publish Dotnet Layers 2 | 3 | on: 4 | push: 5 | tags: 6 | - v**_dotnet 7 | 8 | jobs: 9 | publish-dotnet: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check Tag 13 | id: dotnet-check-tag 14 | run: | 15 | if [[ ${{ 
github.event.ref }} =~ ^refs/tags/v[0-9]+(\.[0-9]+)*_dotnet ]]; then 16 | echo "match=true" >> $GITHUB_OUTPUT 17 | fi 18 | - uses: actions/checkout@v4 19 | - name: Set up QEMU 20 | uses: docker/setup-qemu-action@v3 21 | with: 22 | platforms: arm64, amd64 23 | - name: Publish Dotnet Layer 24 | if: steps.dotnet-check-tag.outputs.match == 'true' 25 | env: 26 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 27 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 28 | AGENT_VERSION: ${{ steps.dotnet-check-tag.outputs.version }} 29 | run: | 30 | cd dotnet 31 | ./publish-layers.sh 32 | -------------------------------------------------------------------------------- /.github/workflows/publish-extension.yml: -------------------------------------------------------------------------------- 1 | name: Publish Extension Layer 2 | 3 | on: 4 | push: 5 | tags: 6 | - v**_extension 7 | 8 | jobs: 9 | publish-extension: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: [ 3.12 ] 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Check Tag 17 | id: extension-check-tag 18 | run: | 19 | if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+(\.[0-9]+)*_extension$ ]]; then 20 | echo "match=true" >> $GITHUB_OUTPUT 21 | fi 22 | - name: Install publish dependencies 23 | if: steps.extension-check-tag.outputs.match == 'true' 24 | run: pip install -U awscli 25 | - name: Set up QEMU 26 | uses: docker/setup-qemu-action@v3 27 | with: 28 | platforms: arm64, amd64 29 | - name: Publish extension layer 30 | if: steps.extension-check-tag.outputs.match == 'true' 31 | env: 32 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 33 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 34 | run: | 35 | cd extension 36 | ./publish-layer.sh 37 | -------------------------------------------------------------------------------- /.github/workflows/publish-java.yml: -------------------------------------------------------------------------------- 1 | name: Publish Java Layers 2 | 3 | on: 4 | push: 5 | tags: 6 | - v**_java 7 | 8 | jobs: 9 | publish-java: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | java-version: [ java8al2, java11, java17, java21 ] 14 | 15 | steps: 16 | - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # pin@v4 17 | - name: Set up Java version 18 | run: | 19 | declare -A map_java_version 20 | map_java_version=( 21 | ["java8al2"]="8" 22 | ["java11"]="11" 23 | ["java17"]="17" 24 | ["java21"]="21" 25 | ) 26 | java_numeric_version=${map_java_version[${{ matrix.java-version }}]} 27 | echo "JAVA_NUMERIC_VERSION=$java_numeric_version" >> $GITHUB_ENV 28 | - name: Use Java ${{ env.JAVA_NUMERIC_VERSION }} 29 | uses: actions/setup-java@v4 30 | with: 31 | distribution: 'corretto' 32 | java-version: ${{ env.JAVA_NUMERIC_VERSION }} 33 | - name: Check Tag 34 | id: java-check-tag 35 | run: | 36 | if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+(\.[0-9]+)*_java$ ]]; then 37 | echo "match=true" >> $GITHUB_OUTPUT 38 | fi 39 | - name: Publish ${{ matrix.java-version }} layer 40 | if: steps.java-check-tag.outputs.match == 'true' 41 | env: 42 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 43 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 44 | run: make publish-${{ matrix.java-version }}-ci 45 | - name: Set up QEMU 46 | uses: docker/setup-qemu-action@v3 47 | with: 48 | platforms: arm64, amd64 49 | - name: Publish ECR image for ${{ matrix.java-version }} 50 | if: steps.java-check-tag.outputs.match == 'true' 51 | env: 52 | AWS_ACCESS_KEY_ID: 
${{ secrets.AWS_ACCESS_KEY_ID }} 53 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 54 | run: | 55 | cd java 56 | ./publish-layers.sh build-publish-${{ matrix.java-version }}-ecr-image 57 | -------------------------------------------------------------------------------- /.github/workflows/publish-node.yml: -------------------------------------------------------------------------------- 1 | name: Publish Node Layers 2 | 3 | on: 4 | push: 5 | tags: 6 | - v**_nodejs 7 | 8 | jobs: 9 | publish-node: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | node-version: [18, 20, 22] 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Use Node.js ${{ matrix.node-version }}.x 17 | uses: actions/setup-node@v4 18 | with: 19 | node-version: ${{ matrix.node-version }} 20 | - name: Check Tag 21 | id: node-check-tag 22 | run: | 23 | if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+(\.[0-9]+)*_nodejs$ ]]; then 24 | echo "match=true" >> $GITHUB_OUTPUT 25 | fi 26 | - name: Run Node ${{ matrix.node-version }} unit tests 27 | if: steps.node-check-tag.outputs.match == 'true' 28 | run: cd nodejs && npm ci && npm run test 29 | - name: Publish layer 30 | if: steps.node-check-tag.outputs.match == 'true' 31 | env: 32 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 33 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 34 | run: make publish-nodejs${{ matrix.node-version }}-ci 35 | - name: Set up QEMU 36 | uses: docker/setup-qemu-action@v3 37 | with: 38 | platforms: arm64, amd64 39 | - name: Publish ECR image for nodejs${{ matrix.node-version }} 40 | if: steps.node-check-tag.outputs.match == 'true' 41 | env: 42 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 43 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 44 | run: | 45 | cd nodejs 46 | ./publish-layers.sh build-publish-${{ matrix.node-version }}-ecr-image 47 | - name: Upload Unit Test Coverage 48 | if: steps.node-check-tag.outputs.match == 'true' 49 | uses: codecov/codecov-action@v5.3.1 50 | with: 51 | token: ${{ secrets.CODECOV_TOKEN }} 52 | files: ./nodejs/coverage/unit/lcov.info 53 | fail_ci_if_error: true 54 | flags: unit-tests 55 | -------------------------------------------------------------------------------- /.github/workflows/publish-python.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python Layers 2 | 3 | on: 4 | push: 5 | tags: 6 | - v**_python 7 | 8 | jobs: 9 | publish-python: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13' ] 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Check Tag 17 | id: python-check-tag 18 | run: | 19 | if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+(\.[0-9]+)*_python$ ]]; then 20 | echo "match=true" >> $GITHUB_OUTPUT 21 | fi 22 | - name: Install python dependencies 23 | if: steps.python-check-tag.outputs.match == 'true' 24 | run: pip install -U awscli 25 | - name: Set up QEMU 26 | uses: docker/setup-qemu-action@v3 27 | with: 28 | platforms: arm64, amd64 29 | - name: Publish Python ${{ matrix.python-version }} layer 30 | if: steps.python-check-tag.outputs.match == 'true' 31 | env: 32 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 33 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 34 | run: | 35 | cd python 36 | ./publish-layers.sh python${{ matrix.python-version }} 37 | -------------------------------------------------------------------------------- /.github/workflows/publish-ruby.yml: 
-------------------------------------------------------------------------------- 1 | name: Publish Ruby Layers 2 | 3 | on: 4 | push: 5 | tags: 6 | - v**_ruby 7 | 8 | jobs: 9 | publish-ruby: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | ruby-version: [3.2, 3.3, 3.4] 14 | steps: 15 | - uses: actions/checkout@v4.2.2 16 | - name: Use Ruby ${{ matrix.ruby-version }} 17 | uses: ruby/setup-ruby@v1.226.0 18 | with: 19 | ruby-version: ${{ matrix.ruby-version }} 20 | - name: Check Tag 21 | id: ruby-check-tag 22 | run: | 23 | if [[ ${{ github.event.ref }} =~ ^refs/tags/v[0-9]+(\.[0-9]+)*_ruby$ ]]; then 24 | echo "match=true" >> $GITHUB_OUTPUT 25 | fi 26 | - name: Clean the workspace 27 | run: ./bin/clean 28 | working-directory: ruby 29 | - name: Install Ruby Dependencies 30 | run: bundle 31 | working-directory: ruby 32 | - name: Obtain Ruby version without the dot 33 | id: ruby-version-without-dot 34 | run: >- 35 | echo "::set-output name=VERSION::$( 36 | echo ${{ matrix.ruby-version }} | sed 's/\.//' 37 | )" 38 | - name: Set up QEMU 39 | uses: docker/setup-qemu-action@v3 40 | with: 41 | platforms: arm64, amd64 42 | - name: Build and Publish layer 43 | if: steps.ruby-check-tag.outputs.match == 'true' 44 | env: 45 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 46 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 47 | run: ./publish-layers.sh ruby${{ matrix.ruby-version }} 48 | working-directory: ruby 49 | -------------------------------------------------------------------------------- /.github/workflows/python.yml: -------------------------------------------------------------------------------- 1 | name: Python Layer CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | paths: 7 | - "python/**" 8 | pull_request: 9 | paths: 10 | - "python/**" 11 | workflow_dispatch: 12 | 13 | jobs: 14 | tests: 15 | runs-on: ubuntu-latest 16 | timeout-minutes: 5 17 | 18 | strategy: 19 | matrix: 20 | python-version: ['3.8', '3.9', '3.10', '3.11'] 21 | steps: 22 | - uses: actions/checkout@v3 23 | 24 | - uses: actions/setup-python@v3 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | 28 | - uses: actions/setup-node@v4 29 | with: 30 | node-version: '20.x' 31 | 32 | - name: Install Python Dependencies 33 | run: pip install pytest requests 34 | 35 | - name: Install Node Dependencies 36 | run: npm ci 37 | working-directory: python 38 | 39 | - name: Run Tests 40 | run: pytest tests/ 41 | working-directory: python 42 | env: 43 | PYTHON_RUNTIME: "python${{ matrix.python-version }}" 44 | -------------------------------------------------------------------------------- /.github/workflows/repolinter.yml: -------------------------------------------------------------------------------- 1 | # NOTE: This file should always be named `repolinter.yml` to allow 2 | # workflow_dispatch to work properly 3 | name: Repolinter Action 4 | 5 | # NOTE: This workflow will ONLY check the default branch! 6 | # Currently there is no elegant way to specify the default 7 | # branch in the event filtering, so branches are instead 8 | # filtered in the "Test Default Branch" step. 
9 | on: [ push, workflow_dispatch ] 10 | 11 | jobs: 12 | repolint: 13 | name: Run Repolinter 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Test Default Branch 17 | id: default-branch 18 | uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # pin@v7 19 | with: 20 | script: | 21 | const data = await github.rest.repos.get(context.repo) 22 | return data.data && data.data.default_branch === context.ref.split('/').slice(-1)[0] 23 | - name: Checkout Self 24 | if: ${{ steps.default-branch.outputs.result == 'true' }} 25 | uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # pin@v4 26 | - name: Run Repolinter 27 | if: ${{ steps.default-branch.outputs.result == 'true' }} 28 | uses: newrelic/repolinter-action@3f4448f855c351e9695b24524a4111c7847b84cb # pin@v1 29 | with: 30 | config_url: https://raw.githubusercontent.com/newrelic/.github/main/repolinter-rulesets/community-plus.yml 31 | output_type: issue 32 | -------------------------------------------------------------------------------- /.github/workflows/ruby.yml: -------------------------------------------------------------------------------- 1 | name: Ruby Layer CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | paths: 7 | - 'ruby/**' 8 | pull_request: 9 | paths: 10 | - 'ruby/**' 11 | workflow_dispatch: 12 | branches: [master] 13 | paths: 14 | - 'ruby/**' 15 | 16 | jobs: 17 | lint: 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v4.2.2 22 | - uses: ruby/setup-ruby@v1.226.0 23 | with: 24 | ruby-version: '3.3' 25 | - run: bundle 26 | working-directory: ruby 27 | - run: rubocop 28 | working-directory: ruby 29 | 30 | unit: 31 | runs-on: ubuntu-latest 32 | 33 | steps: 34 | - uses: actions/checkout@v4.2.2 35 | - uses: ruby/setup-ruby@v1.226.0 36 | with: 37 | ruby-version: '3.3' 38 | # Node.js powers serverless-offline, used for unit tests 39 | - uses: actions/setup-node@v4 40 | with: 41 | node-version: 20.x 42 | - name: Install Node.js Dependencies 43 | run: npm install 44 | working-directory: ruby/test/support 45 | - name: Install Ruby Depedencies 46 | run: bundle 47 | working-directory: ruby 48 | - name: Run Unit Tests 49 | run: bundle exec rake test 50 | working-directory: ruby 51 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | dist 3 | .DS_Store 4 | 5 | # PYTHON GITIGNORE 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | cover/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | db.sqlite3 67 | db.sqlite3-journal 68 | 69 | # Flask stuff: 70 | instance/ 71 | .webassets-cache 72 | 73 | # Scrapy stuff: 74 | .scrapy 75 | 76 | # Sphinx documentation 77 | docs/_build/ 78 | 79 | # PyBuilder 80 | .pybuilder/ 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | # For a library or package, you might want to ignore these files since the code is 92 | # intended to run in multiple environments; otherwise, check them in: 93 | # .python-version 94 | 95 | # pipenv 96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 99 | # install all needed dependencies. 100 | #Pipfile.lock 101 | 102 | # poetry 103 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 104 | # This is especially recommended for binary packages to ensure reproducibility, and is more 105 | # commonly ignored for libraries. 106 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 107 | #poetry.lock 108 | 109 | # pdm 110 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 111 | #pdm.lock 112 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 113 | # in version control. 114 | # https://pdm.fming.dev/#use-with-ide 115 | .pdm.toml 116 | 117 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 118 | __pypackages__/ 119 | 120 | # Celery stuff 121 | celerybeat-schedule 122 | celerybeat.pid 123 | 124 | # SageMath parsed files 125 | *.sage.py 126 | 127 | # Environments 128 | .env 129 | .venv 130 | env/ 131 | venv/ 132 | ENV/ 133 | env.bak/ 134 | venv.bak/ 135 | 136 | # Spyder project settings 137 | .spyderproject 138 | .spyproject 139 | 140 | # Rope project settings 141 | .ropeproject 142 | 143 | # mkdocs documentation 144 | /site 145 | 146 | # mypy 147 | .mypy_cache/ 148 | .dmypy.json 149 | dmypy.json 150 | 151 | # Pyre type checker 152 | .pyre/ 153 | 154 | # pytype static type analyzer 155 | .pytype/ 156 | 157 | # Cython debug symbols 158 | cython_debug/ 159 | 160 | # NODE GITIGNORE 161 | # Logs 162 | logs 163 | *.log 164 | npm-debug.log* 165 | yarn-debug.log* 166 | yarn-error.log* 167 | lerna-debug.log* 168 | .pnpm-debug.log* 169 | 170 | # Diagnostic reports (https://nodejs.org/api/report.html) 171 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 172 | 173 | # Runtime data 174 | pids 175 | *.pid 176 | *.seed 177 | *.pid.lock 178 | 179 | # Directory for instrumented libs generated by jscoverage/JSCover 180 | lib-cov 181 | 182 | # Coverage directory used by tools like istanbul 183 | coverage 184 | *.lcov 185 | 186 | # nyc test coverage 187 | .nyc_output 188 | 189 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 190 | .grunt 191 | 192 | # Bower dependency directory (https://bower.io/) 193 | bower_components 194 | 195 | # node-waf configuration 196 | .lock-wscript 197 | 198 | # Compiled binary addons (https://nodejs.org/api/addons.html) 199 | build/Release 200 | 201 | # Dependency directories 202 | node_modules/ 203 | jspm_packages/ 204 | 205 | # Snowpack dependency directory (https://snowpack.dev/) 206 | web_modules/ 207 | 208 | # TypeScript cache 209 | *.tsbuildinfo 210 | 211 | # Optional npm cache directory 212 | .npm 213 | 214 | # Optional eslint cache 215 | .eslintcache 216 | 217 | # Optional stylelint cache 218 | .stylelintcache 219 | 220 | # Microbundle cache 221 | .rpt2_cache/ 222 | .rts2_cache_cjs/ 223 | .rts2_cache_es/ 224 | .rts2_cache_umd/ 225 | 226 | # Optional REPL history 227 | .node_repl_history 228 | 229 | # Output of 'npm pack' 230 | *.tgz 231 | 232 | # Yarn Integrity file 233 | .yarn-integrity 234 | 235 | # dotenv environment variable files 236 | .env 237 | .env.development.local 238 | .env.test.local 239 | .env.production.local 240 | .env.local 241 | 242 | # parcel-bundler cache (https://parceljs.org/) 243 | .cache 244 | .parcel-cache 245 | 246 | # Next.js build output 247 | .next 248 | out 249 | 250 | # Nuxt.js build / generate output 251 | .nuxt 252 | dist 253 | 254 | # Gatsby files 255 | .cache/ 256 | # Comment in the public line in if your project uses Gatsby and not Next.js 257 | # https://nextjs.org/blog/next-9-1#public-directory-support 258 | # public 259 | 260 | # vuepress build output 261 | .vuepress/dist 262 | 263 | # vuepress v2.x temp and cache directory 264 | .temp 265 | .cache 266 | 267 | # Docusaurus cache and generated files 268 | .docusaurus 269 | 270 | # Serverless directories 271 | .serverless/ 272 | 273 | # FuseBox cache 274 | .fusebox/ 275 | 276 | # DynamoDB Local files 277 | .dynamodb/ 278 | 279 | # TernJS port file 280 | .tern-port 281 | 282 | # Stores VSCode versions used for testing VSCode extensions 283 | .vscode-test 284 | 285 | # yarn v2 286 | .yarn/cache 287 | .yarn/unplugged 288 | .yarn/build-state.yml 289 | .yarn/install-state.gz 290 | 
.pnp.* 291 | 292 | # JAVA GITIGNORE 293 | # Compiled class file 294 | *.class 295 | 296 | # Log file 297 | *.log 298 | 299 | # BlueJ files 300 | *.ctxt 301 | 302 | # Mobile Tools for Java (J2ME) 303 | .mtj.tmp/ 304 | 305 | # Package Files # 306 | *.jar 307 | *.war 308 | *.nar 309 | *.ear 310 | *.zip 311 | *.tar.gz 312 | *.rar 313 | 314 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 315 | hs_err_pid* 316 | replay_pid* -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Contributions are always welcome. Before contributing please read the 4 | [code of conduct](https://opensource.newrelic.com/code-of-conduct/) and [search the issue tracker](issues); your issue may have already been discussed or fixed in `master`. To contribute, 5 | [fork](https://help.github.com/articles/fork-a-repo/) this repository, commit your changes, and [send a Pull Request](https://help.github.com/articles/using-pull-requests/). 6 | 7 | Note that our [code of conduct](./CODE_OF_CONDUCT.md) applies to all platforms and venues related to this project; please follow it in all your interactions with the project and its participants. 8 | 9 | ## Feature Requests 10 | 11 | Feature requests should be submitted in the [Issue tracker](../../issues), with a description of the expected behavior & use case, where they’ll remain closed until sufficient interest, [e.g. :+1: reactions](https://help.github.com/articles/about-discussions-in-issues-and-pull-requests/), has been [shown by the community](../../issues?q=label%3A%22votes+needed%22+sort%3Areactions-%2B1-desc). 12 | Before submitting an Issue, please search for similar ones in the 13 | [closed issues](../../issues?q=is%3Aissue+is%3Aclosed+label%3Aenhancement). 14 | 15 | ## Pull Requests 16 | 17 | 1. Ensure any install or build dependencies are removed before the end of the layer when doing a build. 18 | 2. Increase the version numbers in any examples files and the README.md to the new version that this Pull Request would represent. The versioning scheme we use is [SemVer](http://semver.org/). 19 | 3. You may merge the Pull Request in once you have the sign-off of two other developers, or if you do not have permission to do that, you may request the second reviewer to merge it for you. 20 | 21 | ## Code of Conduct 22 | 23 | Before contributing please read the [code of conduct](https://github.com/newrelic/.github/blob/main/CODE_OF_CONDUCT.md) 24 | 25 | Note that our [code of conduct](https://github.com/newrelic/.github/blob/main/CODE_OF_CONDUCT.md) applies to all platforms 26 | and venues related to this project; please follow it in all your interactions 27 | with the project and its participants. 28 | 29 | ## Contributor License Agreement 30 | 31 | Keep in mind that when you submit your Pull Request, you'll need to sign the CLA via the click-through using CLA-Assistant. If you'd like to execute our corporate CLA, or if you have any questions, please drop us an email at opensource@newrelic.com. 32 | 33 | For more information about CLAs, please check out Alex Russell’s excellent post, 34 | [“Why Do I Need to Sign This?”](https://infrequently.org/2008/06/why-do-i-need-to-sign-this/). 
35 | 36 | -------------------------------------------------------------------------------- /Dockerfile.ecrImage: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | 3 | ARG layer_zip 4 | ARG file_without_dist 5 | 6 | RUN apk update && apk add --no-cache curl unzip 7 | 8 | WORKDIR / 9 | 10 | COPY ${layer_zip} . 11 | 12 | RUN unzip ${file_without_dist} -d ./opt 13 | RUN rm ${file_without_dist} 14 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | build-java8al2: 2 | docker build \ 3 | --no-cache \ 4 | -t newrelic-lambda-layers-java8al2 \ 5 | -f ./dockerfiles/Dockerfile.java8al2 \ 6 | . 7 | 8 | publish-java8al2-ci: build-java8al2 9 | docker run \ 10 | -e AWS_ACCESS_KEY_ID \ 11 | -e AWS_SECRET_ACCESS_KEY \ 12 | newrelic-lambda-layers-java8al2 13 | 14 | publish-java8al2-local: build-java8al2 15 | docker run \ 16 | -e AWS_PROFILE \ 17 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 18 | newrelic-lambda-layers-java8al2 19 | 20 | build-java11: 21 | docker build \ 22 | --no-cache \ 23 | -t newrelic-lambda-layers-java11 \ 24 | -f ./dockerfiles/Dockerfile.java11 \ 25 | . 26 | 27 | publish-java11-ci: build-java11 28 | docker run \ 29 | -e AWS_ACCESS_KEY_ID \ 30 | -e AWS_SECRET_ACCESS_KEY \ 31 | newrelic-lambda-layers-java11 32 | 33 | publish-java11-local: build-java11 34 | docker run \ 35 | -e AWS_PROFILE \ 36 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 37 | newrelic-lambda-layers-java11 38 | 39 | build-java17: 40 | docker build \ 41 | --no-cache \ 42 | -t newrelic-lambda-layers-java17 \ 43 | -f ./dockerfiles/Dockerfile.java17 \ 44 | . 45 | 46 | publish-java17-ci: build-java17 47 | docker run \ 48 | -e AWS_ACCESS_KEY_ID \ 49 | -e AWS_SECRET_ACCESS_KEY \ 50 | newrelic-lambda-layers-java17 51 | 52 | publish-java17-local: build-java17 53 | docker run \ 54 | -e AWS_PROFILE \ 55 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 56 | newrelic-lambda-layers-java17 57 | 58 | build-java21: 59 | docker build \ 60 | --no-cache \ 61 | -t newrelic-lambda-layers-java21 \ 62 | -f ./dockerfiles/Dockerfile.java21 \ 63 | . 64 | 65 | publish-java21-ci: build-java21 66 | docker run \ 67 | -e AWS_ACCESS_KEY_ID \ 68 | -e AWS_SECRET_ACCESS_KEY \ 69 | newrelic-lambda-layers-java21 70 | 71 | publish-java21-local: build-java21 72 | docker run \ 73 | -e AWS_PROFILE \ 74 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 75 | newrelic-lambda-layers-java21 76 | 77 | build-nodejs18: 78 | docker build \ 79 | --no-cache \ 80 | -t newrelic-lambda-layers-nodejs18 \ 81 | -f ./dockerfiles/Dockerfile.nodejs18 \ 82 | . 83 | 84 | publish-nodejs18-ci: build-nodejs18 85 | docker run \ 86 | -e AWS_ACCESS_KEY_ID \ 87 | -e AWS_SECRET_ACCESS_KEY \ 88 | newrelic-lambda-layers-nodejs18 89 | 90 | publish-nodejs18-local: build-nodejs18 91 | docker run \ 92 | -e AWS_PROFILE \ 93 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 94 | newrelic-lambda-layers-nodejs18 95 | 96 | build-nodejs20: 97 | docker build \ 98 | --no-cache \ 99 | -t newrelic-lambda-layers-nodejs20 \ 100 | -f ./dockerfiles/Dockerfile.nodejs20 \ 101 | . 
102 | 103 | publish-nodejs20-ci: build-nodejs20 104 | docker run \ 105 | -e AWS_ACCESS_KEY_ID \ 106 | -e AWS_SECRET_ACCESS_KEY \ 107 | newrelic-lambda-layers-nodejs20 108 | 109 | publish-nodejs20-local: build-nodejs20 110 | docker run \ 111 | -e AWS_PROFILE \ 112 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 113 | newrelic-lambda-layers-nodejs20 114 | 115 | build-nodejs22: 116 | docker build \ 117 | --no-cache \ 118 | -t newrelic-lambda-layers-nodejs22 \ 119 | -f ./dockerfiles/Dockerfile.nodejs22 \ 120 | . 121 | 122 | publish-nodejs22-ci: build-nodejs22 123 | docker run \ 124 | -e AWS_ACCESS_KEY_ID \ 125 | -e AWS_SECRET_ACCESS_KEY \ 126 | newrelic-lambda-layers-nodejs22 127 | 128 | publish-nodejs22-local: build-nodejs22 129 | docker run \ 130 | -e AWS_PROFILE \ 131 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 132 | newrelic-lambda-layers-nodejs22 133 | 134 | build-ruby32: 135 | docker build \ 136 | --no-cache \ 137 | -t newrelic-lambda-layers-ruby32 \ 138 | -f ./dockerfiles/Dockerfile.ruby32 \ 139 | . 140 | 141 | publish-ruby32-ci: build-ruby32 142 | docker run \ 143 | -e AWS_ACCESS_KEY_ID \ 144 | -e AWS_SECRET_ACCESS_KEY \ 145 | newrelic-lambda-layers-ruby32 146 | 147 | publish-ruby32-local: build-ruby32 148 | docker run \ 149 | -e AWS_PROFILE \ 150 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 151 | newrelic-lambda-layers-ruby32 152 | 153 | build-ruby33: 154 | docker build \ 155 | --no-cache \ 156 | -t newrelic-lambda-layers-ruby33 \ 157 | -f ./dockerfiles/Dockerfile.ruby33 \ 158 | . 159 | 160 | publish-ruby33-ci: build-ruby33 161 | docker run \ 162 | -e AWS_ACCESS_KEY_ID \ 163 | -e AWS_SECRET_ACCESS_KEY \ 164 | newrelic-lambda-layers-ruby33 165 | 166 | publish-ruby33-local: build-ruby33 167 | docker run \ 168 | -e AWS_PROFILE \ 169 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 170 | newrelic-lambda-layers-ruby33 171 | 172 | build-ruby34: 173 | docker build \ 174 | --no-cache \ 175 | -t newrelic-lambda-layers-ruby34 \ 176 | -f ./dockerfiles/Dockerfile.ruby34 \ 177 | . 178 | 179 | publish-ruby34-ci: build-ruby34 180 | docker run \ 181 | -e AWS_ACCESS_KEY_ID \ 182 | -e AWS_SECRET_ACCESS_KEY \ 183 | newrelic-lambda-layers-ruby34 184 | 185 | publish-ruby34-local: build-ruby34 186 | docker run \ 187 | -e AWS_PROFILE \ 188 | -v "${HOME}/.aws:/home/newrelic-lambda-layers/.aws" \ 189 | newrelic-lambda-layers-ruby34 -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Community Plus header](https://github.com/newrelic/opensource-website/raw/main/src/images/categories/Community_Plus.png)](https://opensource.newrelic.com/oss-category/#community-plus) 2 | 3 | # New Relic Lambda Layers 4 | 5 | This repository contains source code and utilities to build and publish New Relic's public AWS Lambda layers. 6 | 7 | Most users should use our published layers which are chosen automatically via the [CLI tool](https://github.com/newrelic/newrelic-lambda-cli). Those layers are published to be public and are available [here](https://layers.newrelic-external.com/). 8 | 9 | This tool is released for users seeking to deploy their own copies of the New Relic Lambda Layers into their accounts, or to modify and publish their own customized wrapper layers. 
10 | 11 | ## Requirements: 12 | 13 | * aws-cli 14 | * bash shell 15 | 16 | The AWS CLI must be configured; please refer to its [documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html). 17 | 18 | ## Publishing Layers: 19 | To publish a layer, choose the runtime according to the options provided in the `publish-layers.sh` script. Then, run the following commands in your shell: 20 | 21 | ``` 22 | cd python 23 | ./publish-layers.sh python3.12 24 | cd .. 25 | ``` 26 | 27 | ``` 28 | cd nodejs 29 | ./publish-layers.sh nodejs20 30 | cd .. 31 | ``` 32 | 33 | ``` 34 | cd ruby 35 | ./publish-layers.sh ruby3.3 36 | cd .. 37 | ``` 38 | 39 | ``` 40 | cd java 41 | ./publish-layers.sh java21 42 | cd .. 43 | ``` 44 | 45 | ``` 46 | cd dotnet 47 | ./publish-layers.sh 48 | cd .. 49 | ``` 50 | 51 | ``` 52 | cd extension 53 | ./publish-layer.sh 54 | cd .. 55 | ``` 56 | 57 | ## Attaching Custom Lambda Layer ARNs 58 | 59 | The layers published to your account may be used directly within SAM, CloudFormation templates, serverless.yml, or other configuration methods that allow specifying layers by ARN. 60 | 61 | New Relic Serverless APM customers are advised to use the [newrelic-lambda-cli tool](https://github.com/newrelic/newrelic-lambda-cli), and it may be used with custom layers by adding the `--layer-arn` flag to the `layers install` command: 62 | 63 | ``` 64 | newrelic-lambda layers install \ 65 | --function <your function name> \ 66 | --nr-account-id <your New Relic account ID> \ 67 | --layer-arn <your custom layer ARN> 68 | ``` 69 | 70 | ## Manual Instrumentation using Layers: 71 | 72 | We recommend using the [newrelic-lambda-cli tool](https://github.com/newrelic/newrelic-lambda-cli), but some users find that they need, or prefer, to configure their functions manually. 73 | 74 | These steps will help you configure the layers correctly: 75 | 76 | 1. Find the New Relic AWS Lambda Layer ARN that matches your runtime and region. 77 | 2. Copy the ARN of the most recent AWS Lambda Layer version and attach it to your function. 78 | * Using CloudFormation, this means adding your layer ARN to the Layers property of an [AWS::Lambda::Function resource](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-function.html). 79 | 3. Update your function's handler to point to the newly attached layer in the console for your function: 80 | * Python: `newrelic_lambda_wrapper.handler` 81 | * Node: 82 | * CommonJS: `newrelic-lambda-wrapper.handler` 83 | * ESM: `/opt/nodejs/node_modules/newrelic-esm-lambda-wrapper/index.handler` - You must specify the full path to the wrapper in the layer because the AWS Lambda runtime doesn't support importing from a layer. 84 | * Ruby: `newrelic_lambda_wrapper.handler` 85 | * Java: 86 | * RequestHandler implementation: `com.newrelic.java.HandlerWrapper::handleRequest` 87 | * RequestStreamHandlerWrapper implementation: `com.newrelic.java.HandlerWrapper::handleStreamsRequest` 88 | * .NET: This step is not required. 89 | 4. Add these environment variables to your Lambda console (a combined example follows this list): 90 | * NEW_RELIC_ACCOUNT_ID: Your New Relic account ID 91 | * NEW_RELIC_LAMBDA_HANDLER: Path to your initial handler. 92 | * NEW_RELIC_USE_ESM: For Node.js handlers using ES Modules, set to `true`. 93 | * CORECLR_ENABLE_PROFILING (.NET only): 1 94 | * CORECLR_PROFILER (.NET only): {36032161-FFC0-4B61-B559-F6C5D41BAE5A} 95 | * CORECLR_NEWRELIC_HOME (.NET only): /opt/lib/newrelic-dotnet-agent 96 | * CORECLR_PROFILER_PATH (.NET only): /opt/lib/newrelic-dotnet-agent/libNewRelicProfiler.so
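For reference, the settings from steps 2-4 combine as in the following minimal `serverless.yml` sketch for a Node.js (CommonJS) function. This is a hypothetical example: the layer ARN, account ID, runtime, and `app.handler` path are placeholders you would replace with your own values.

```
# Hypothetical sketch only -- substitute your own layer ARN, account ID,
# runtime, and original handler path.
functions:
  myFunction:
    runtime: nodejs20.x
    # Step 3: point the function at the wrapper shipped in the layer
    handler: newrelic-lambda-wrapper.handler
    layers:
      # Step 2: the ARN of the New Relic layer published to your account
      - arn:aws:lambda:us-east-1:123456789012:layer:NewRelicNodeJS20X:1
    environment:
      # Step 4: the New Relic account the wrapper reports to
      NEW_RELIC_ACCOUNT_ID: "123456789012"
      # Step 4: your original handler, which the wrapper invokes
      NEW_RELIC_LAMBDA_HANDLER: app.handler
```

The same pattern applies to the other runtimes; only the wrapper handler and the runtime-specific variables from the list above change.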
97 | 98 | Refer to the [New Relic AWS Lambda Monitoring Documentation](https://docs.newrelic.com/docs/serverless-function-monitoring/aws-lambda-monitoring/get-started/enable-new-relic-monitoring-aws-lambda) for instructions on completing your configuration by linking your AWS account and CloudWatch log streams to New Relic. 99 | 100 | ## Support for ES Modules (Node.js) 101 | 102 | AWS announced support for Node 18 as a Lambda runtime in late 2022, introducing `aws-sdk` version 3 for Node 18 only. This version of `aws-sdk` patches `NODE_PATH`, so ESM-supporting functions using `import` and top-level `await` should work as expected with Lambda Layer releases `v9.8.1.1` and above (numerical layer versions vary by region and runtime). To configure the layer to leverage `import`, add this environment variable to use our ESM loader: `NODE_OPTIONS: --experimental-loader newrelic/esm-loader.mjs`. **Note**: The function handler should also use `newrelic-esm-lambda-wrapper.handler`. 103 | 104 | ### Note on performance for ES Module functions 105 | 106 | If you are using the legacy `NEW_RELIC_USE_ESM` flag on the `newrelic-lambda-wrapper.handler`, and your ESM function imports a large number of dependencies and files, you may see long cold start times as a result. As a workaround, we recommend using the ESM wrapper. Set your handler to `/opt/nodejs/node_modules/newrelic-esm-lambda-wrapper/index.handler`. 107 | 108 | ### Note on legacy `NEW_RELIC_USE_ESM` environment variable 109 | Prior to Lambda Layer release `v12.16.0`, the wrapper for CommonJS and ESM was the same. If you wanted to wrap an ESM Lambda handler, you had to set `NEW_RELIC_USE_ESM` to `true`. This functionality still exists but has been deprecated. If you have an ESM Lambda handler, set the function handler to point to `newrelic-esm-lambda-wrapper.handler`. We will be removing `NEW_RELIC_USE_ESM` at a future date. 110 | 111 | ## Support 112 | 113 | Should you need assistance with New Relic products, you are in good hands with several support channels. 114 | 115 | If the issue has been confirmed as a bug or is a feature request, please file a GitHub issue. 116 | 117 | **Support Channels** 118 | 119 | * [New Relic Documentation](https://docs.newrelic.com/docs/serverless-function-monitoring/aws-lambda-monitoring/get-started/monitoring-aws-lambda-serverless-monitoring/): Comprehensive guidance for using our platform 120 | * [New Relic Community](https://discuss.newrelic.com/tags/c/full-stack-observability/serverless/): The best place to engage in troubleshooting questions 121 | * [New Relic Developer](https://developer.newrelic.com/): Resources for building custom observability applications 122 | * [New Relic University](https://learn.newrelic.com/): A range of online training for New Relic users of every level 123 | * [New Relic Technical Support](https://support.newrelic.com/): 24/7/365 ticketed support. Read more about our [Technical Support Offerings](https://docs.newrelic.com/docs/licenses/license-information/general-usage-licenses/support-plan). 124 | 125 | ## Privacy 126 | At New Relic, we take your privacy and the security of your information seriously, and are committed to protecting your information.
We must emphasize the importance of not sharing personal data in public forums, and ask all users to scrub logs and diagnostic information for sensitive information, whether personal, proprietary, or otherwise. 127 | 128 | We define “Personal Data” as any information relating to an identified or identifiable individual, including, for example, your name, phone number, post code or zip code, Device ID, IP address and email address. 129 | 130 | Please review [New Relic’s General Data Privacy Notice](https://newrelic.com/termsandconditions/privacy) for more information. 131 | 132 | ## Contribute 133 | 134 | We encourage your contributions to improve the New Relic Lambda layers! Keep in mind that when you submit your pull request, you'll need to sign the CLA via the click-through using CLA-Assistant. You only have to sign the CLA one time per project. 135 | 136 | If you have any questions, or if you need to execute our corporate CLA (required if your contribution is on behalf of a company), please drop us an email at opensource@newrelic.com. 137 | 138 | **A note about vulnerabilities** 139 | 140 | As noted in our [security policy](../../security/policy), New Relic is committed to the privacy and security of our customers and their data. We believe that providing coordinated disclosure by security researchers and engaging with the security community are important means to achieve our security goals. 141 | 142 | If you believe you have found a security vulnerability in this project or any of New Relic's products or websites, we welcome and greatly appreciate you reporting it to New Relic through [HackerOne](https://hackerone.com/newrelic). 143 | 144 | If you would like to contribute to this project, review [these guidelines](./CONTRIBUTING.md). 145 | 146 | To [all contributors](https://github.com/newrelic/newrelic-lambda-layers/graphs/contributors), we thank you! Without your contribution, this project would not be what it is today. 147 | 148 | ## License 149 | 150 | The New Relic Lambda layers are licensed under the [Apache 2.0](http://apache.org/licenses/LICENSE-2.0.txt) License. 151 | 152 | The New Relic Lambda layers also use source code from third-party libraries. You can find full details on which libraries are used and the terms under which they are licensed in [the third-party notices document](https://github.com/newrelic/newrelic-lambda-layers/blob/main/THIRD_PARTY_NOTICES.md). 153 | -------------------------------------------------------------------------------- /THIRD_PARTY_NOTICES.md: -------------------------------------------------------------------------------- 1 | # Third Party Notices 2 | 3 | The newrelic-lambda-layers project uses source code from third-party libraries which carry 4 | their own copyright notices and license terms. These notices are provided 5 | below. 6 | 7 | In the event that a required notice is missing or incorrect, please notify us 8 | by e-mailing [open-source@newrelic.com](mailto:open-source@newrelic.com). 9 | 10 | For any licenses that require the disclosure of source 11 | code, the source code can be found at [https://github.com/newrelic/newrelic-lambda-layers](https://github.com/newrelic/newrelic-lambda-layers). 
12 | 13 | ## Content 14 | 15 | **[dependencies](#dependencies)** 16 | 17 | N/A 18 | 19 | **[devDependencies](#devDependencies)** 20 | 21 | * eslint 22 | * eslint-plugin-import 23 | -------------------------------------------------------------------------------- /create-buckets.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | 3 | BUCKET_PREFIX=nr-layers 4 | REGIONS=( 5 | ap-northeast-1 6 | ap-northeast-2 7 | ap-south-1 8 | ap-southeast-1 9 | ap-southeast-2 10 | ca-central-1 11 | eu-central-1 12 | eu-north-1 13 | eu-west-1 14 | eu-west-2 15 | eu-west-3 16 | sa-east-1 17 | us-east-1 18 | us-east-2 19 | us-west-1 20 | us-west-2 21 | ) 22 | 23 | for region in "${REGIONS[@]}"; do 24 | bucket_name="${BUCKET_PREFIX}-${region}" 25 | aws s3 mb "s3://${bucket_name}" --region $region 26 | done 27 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.java11: -------------------------------------------------------------------------------- 1 | FROM openjdk:11 as builder 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y curl unzip zip 5 | 6 | RUN useradd -m newrelic-lambda-layers 7 | USER newrelic-lambda-layers 8 | WORKDIR /home/newrelic-lambda-layers 9 | 10 | COPY --chown=newrelic-lambda-layers libBuild.sh . 11 | COPY --chown=newrelic-lambda-layers java java/ 12 | 13 | WORKDIR java 14 | RUN ./publish-layers.sh build-java11 15 | 16 | FROM python:3.8 17 | 18 | RUN useradd -m newrelic-lambda-layers 19 | USER newrelic-lambda-layers 20 | WORKDIR /home/newrelic-lambda-layers 21 | RUN pip3 install -U awscli --user 22 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 23 | 24 | COPY libBuild.sh . 25 | COPY java java/ 26 | COPY --from=builder /home/newrelic-lambda-layers/java/dist java/dist 27 | 28 | WORKDIR java 29 | CMD ./publish-layers.sh publish-java11 30 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.java17: -------------------------------------------------------------------------------- 1 | FROM eclipse-temurin:17 as builder 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y curl unzip zip 5 | 6 | RUN useradd -m newrelic-lambda-layers 7 | USER newrelic-lambda-layers 8 | WORKDIR /home/newrelic-lambda-layers 9 | 10 | COPY --chown=newrelic-lambda-layers libBuild.sh . 11 | COPY --chown=newrelic-lambda-layers java java/ 12 | 13 | WORKDIR java 14 | RUN ./publish-layers.sh build-java17 15 | 16 | FROM python:3.8 17 | 18 | RUN useradd -m newrelic-lambda-layers 19 | USER newrelic-lambda-layers 20 | WORKDIR /home/newrelic-lambda-layers 21 | RUN pip3 install -U awscli --user 22 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 23 | 24 | COPY libBuild.sh . 25 | COPY java java/ 26 | COPY --from=builder /home/newrelic-lambda-layers/java/dist java/dist 27 | 28 | WORKDIR java 29 | CMD ./publish-layers.sh publish-java17 30 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.java21: -------------------------------------------------------------------------------- 1 | FROM eclipse-temurin:21 as builder 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y curl unzip zip 5 | 6 | RUN useradd -m newrelic-lambda-layers 7 | USER newrelic-lambda-layers 8 | WORKDIR /home/newrelic-lambda-layers 9 | 10 | COPY --chown=newrelic-lambda-layers libBuild.sh . 
11 | COPY --chown=newrelic-lambda-layers java java/ 12 | 13 | WORKDIR java 14 | RUN ./publish-layers.sh build-java21 15 | 16 | FROM python:3.8 17 | 18 | RUN useradd -m newrelic-lambda-layers 19 | USER newrelic-lambda-layers 20 | WORKDIR /home/newrelic-lambda-layers 21 | RUN pip3 install -U awscli --user 22 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 23 | 24 | COPY libBuild.sh . 25 | COPY java java/ 26 | COPY --from=builder /home/newrelic-lambda-layers/java/dist java/dist 27 | 28 | WORKDIR java 29 | CMD ./publish-layers.sh publish-java21 30 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.java8al2: -------------------------------------------------------------------------------- 1 | FROM openjdk:8 as builder 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y curl unzip zip 5 | 6 | RUN useradd -m newrelic-lambda-layers 7 | USER newrelic-lambda-layers 8 | WORKDIR /home/newrelic-lambda-layers 9 | 10 | COPY --chown=newrelic-lambda-layers libBuild.sh . 11 | COPY --chown=newrelic-lambda-layers java java/ 12 | 13 | WORKDIR java 14 | RUN ./publish-layers.sh build-java8al2 15 | 16 | FROM python:3.8 17 | 18 | RUN useradd -m newrelic-lambda-layers 19 | USER newrelic-lambda-layers 20 | WORKDIR /home/newrelic-lambda-layers 21 | RUN pip3 install -U awscli --user 22 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 23 | 24 | COPY libBuild.sh . 25 | COPY java java/ 26 | COPY --from=builder /home/newrelic-lambda-layers/java/dist java/dist 27 | 28 | WORKDIR java 29 | CMD ./publish-layers.sh publish-java8al2 30 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.nodejs18: -------------------------------------------------------------------------------- 1 | FROM node:18 as builder 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y curl unzip zip 5 | 6 | RUN useradd -m newrelic-lambda-layers 7 | USER newrelic-lambda-layers 8 | WORKDIR /home/newrelic-lambda-layers 9 | 10 | COPY --chown=newrelic-lambda-layers libBuild.sh . 11 | COPY --chown=newrelic-lambda-layers nodejs nodejs/ 12 | 13 | WORKDIR nodejs 14 | RUN ./publish-layers.sh build-18 15 | 16 | FROM python:3.8 17 | 18 | RUN useradd -m newrelic-lambda-layers 19 | USER newrelic-lambda-layers 20 | WORKDIR /home/newrelic-lambda-layers 21 | RUN pip3 install -U awscli --user 22 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 23 | 24 | COPY libBuild.sh . 25 | COPY nodejs nodejs/ 26 | COPY --from=builder /home/newrelic-lambda-layers/nodejs/dist nodejs/dist 27 | 28 | WORKDIR nodejs 29 | CMD ./publish-layers.sh publish-18 30 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.nodejs20: -------------------------------------------------------------------------------- 1 | FROM node:20 as builder 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y curl unzip zip 5 | 6 | RUN useradd -m newrelic-lambda-layers 7 | USER newrelic-lambda-layers 8 | WORKDIR /home/newrelic-lambda-layers 9 | 10 | COPY --chown=newrelic-lambda-layers libBuild.sh . 11 | COPY --chown=newrelic-lambda-layers nodejs nodejs/ 12 | 13 | WORKDIR nodejs 14 | RUN ./publish-layers.sh build-20 15 | 16 | FROM python:3.8 17 | 18 | RUN useradd -m newrelic-lambda-layers 19 | USER newrelic-lambda-layers 20 | WORKDIR /home/newrelic-lambda-layers 21 | RUN pip3 install -U awscli --user 22 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 23 | 24 | COPY libBuild.sh . 
25 | COPY nodejs nodejs/ 26 | COPY --from=builder /home/newrelic-lambda-layers/nodejs/dist nodejs/dist 27 | 28 | WORKDIR nodejs 29 | CMD ./publish-layers.sh publish-20 30 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.nodejs22: -------------------------------------------------------------------------------- 1 | FROM node:22 as builder 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y curl unzip zip 5 | 6 | RUN useradd -m newrelic-lambda-layers 7 | USER newrelic-lambda-layers 8 | WORKDIR /home/newrelic-lambda-layers 9 | 10 | COPY --chown=newrelic-lambda-layers libBuild.sh . 11 | COPY --chown=newrelic-lambda-layers nodejs nodejs/ 12 | 13 | WORKDIR nodejs 14 | RUN ./publish-layers.sh build-22 15 | 16 | FROM python:3.8 17 | 18 | RUN useradd -m newrelic-lambda-layers 19 | USER newrelic-lambda-layers 20 | WORKDIR /home/newrelic-lambda-layers 21 | RUN pip3 install -U awscli --user 22 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 23 | 24 | COPY libBuild.sh . 25 | COPY nodejs nodejs/ 26 | COPY --from=builder /home/newrelic-lambda-layers/nodejs/dist nodejs/dist 27 | 28 | WORKDIR nodejs 29 | CMD ./publish-layers.sh publish-22 30 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.ruby32: -------------------------------------------------------------------------------- 1 | FROM ruby:3.2 2 | 3 | RUN apt update 4 | 5 | # curl - downloads the prebuilt NR Lambda extension 6 | # python3/pip3 - for the AWS CLI 7 | # zip/unzip - for building AWS Lambda layers .zip files 8 | RUN apt install -y curl python3 python3-pip unzip zip 9 | 10 | RUN useradd -m newrelic-lambda-layers 11 | USER newrelic-lambda-layers 12 | WORKDIR /home/newrelic-lambda-layers 13 | RUN pip3 install -U awscli --user --no-cache-dir --break-system-packages 14 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 15 | 16 | # Ruby layer building depends on the shared `libBuild.sh` script but is 17 | # otherwise independent. Copy over only what we need to build Ruby layers. 18 | COPY --chown=newrelic-lambda-layers libBuild.sh . 19 | COPY --chown=newrelic-lambda-layers ruby ruby/ 20 | 21 | WORKDIR ruby 22 | 23 | # Run ruby/bin/clean just in case Docker is being ran from a developer's 24 | # workstation and their git clone directory has some stray files that could 25 | # conflict. 26 | RUN ./bin/clean 27 | 28 | CMD ./publish-layers.sh ruby3.2 29 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.ruby33: -------------------------------------------------------------------------------- 1 | FROM ruby:3.3 2 | 3 | RUN apt update 4 | 5 | # curl - downloads the prebuilt NR Lambda extension 6 | # python3/pip3 - for the AWS CLI 7 | # zip/unzip - for building AWS Lambda layers .zip files 8 | RUN apt install -y curl python3 python3-pip unzip zip 9 | 10 | RUN useradd -m newrelic-lambda-layers 11 | USER newrelic-lambda-layers 12 | WORKDIR /home/newrelic-lambda-layers 13 | RUN pip3 install -U awscli --user --no-cache-dir --break-system-packages 14 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 15 | 16 | # Ruby layer building depends on the shared `libBuild.sh` script but is 17 | # otherwise independent. Copy over only what we need to build Ruby layers. 18 | COPY --chown=newrelic-lambda-layers libBuild.sh . 
19 | COPY --chown=newrelic-lambda-layers ruby ruby/ 20 | 21 | WORKDIR ruby 22 | 23 | # Run ruby/bin/clean just in case Docker is being run from a developer's 24 | # workstation and their git clone directory has some stray files that could 25 | # conflict. 26 | RUN ./bin/clean 27 | 28 | CMD ./publish-layers.sh ruby3.3 29 | -------------------------------------------------------------------------------- /dockerfiles/Dockerfile.ruby34: -------------------------------------------------------------------------------- 1 | FROM ruby:3.4 2 | 3 | RUN apt update 4 | 5 | # curl - downloads the prebuilt NR Lambda extension 6 | # python3/pip3 - for the AWS CLI 7 | # zip/unzip - for building AWS Lambda layers .zip files 8 | RUN apt install -y curl python3 python3-pip unzip zip 9 | 10 | RUN useradd -m newrelic-lambda-layers 11 | USER newrelic-lambda-layers 12 | WORKDIR /home/newrelic-lambda-layers 13 | RUN pip3 install -U awscli --user --no-cache-dir --break-system-packages 14 | ENV PATH /home/newrelic-lambda-layers/.local/bin/:$PATH 15 | 16 | # Ruby layer building depends on the shared `libBuild.sh` script but is 17 | # otherwise independent. Copy over only what we need to build Ruby layers. 18 | COPY --chown=newrelic-lambda-layers libBuild.sh . 19 | COPY --chown=newrelic-lambda-layers ruby ruby/ 20 | 21 | WORKDIR ruby 22 | 23 | # Run ruby/bin/clean just in case Docker is being run from a developer's 24 | # workstation and their git clone directory has some stray files that could 25 | # conflict. 26 | RUN ./bin/clean 27 | 28 | CMD ./publish-layers.sh ruby3.4 29 | -------------------------------------------------------------------------------- /dotnet/publish-layers.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -Eeuo pipefail 4 | 5 | BUILD_DIR=lib # for .NET this can be either lib or bin. See: https://docs.aws.amazon.com/lambda/latest/dg/packaging-layers.html 6 | DIST_DIR=dist 7 | 8 | DOTNET_DIST_ARM64=$DIST_DIR/dotnet.arm64.zip 9 | DOTNET_DIST_X86_64=$DIST_DIR/dotnet.x86_64.zip 10 | 11 | AGENT_DIST_ZIP=agent.zip 12 | NEWRELIC_AGENT_VERSION="" 13 | VERSION_REGEX="v?([0-9]+\.[0-9]+\.[0-9]+)\.[0-9]+_dotnet" 14 | 15 | source ../libBuild.sh 16 | 17 | function usage { 18 | echo "./publish-layers.sh" 19 | } 20 | 21 | function build-dotnet-x86-64 { 22 | echo "Building New Relic layer for .NET 6, 7 and 8 (x86_64)" 23 | rm -rf $BUILD_DIR $DOTNET_DIST_X86_64 24 | mkdir -p $DIST_DIR 25 | get_agent amd64 26 | # MAKE CONFIG CHANGES HERE 27 | download_extension x86_64 28 | zip -rq $DOTNET_DIST_X86_64 $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 29 | rm -rf $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 30 | echo "Build complete: ${DOTNET_DIST_X86_64}" 31 | } 32 | 33 | function publish-dotnet-x86-64 { 34 | if [ !
-f $DOTNET_DIST_X86_64 ]; then 35 | echo "Package not found: ${DOTNET_DIST_X86_64}" 36 | exit 1 37 | fi 38 | 39 | for region in "${REGIONS_X86[@]}"; do 40 | publish_layer $DOTNET_DIST_X86_64 $region dotnet x86_64 $NEWRELIC_AGENT_VERSION 41 | done 42 | 43 | publish_docker_ecr $DOTNET_DIST_X86_64 dotnet x86_64 44 | } 45 | 46 | function build-dotnet-arm64 { 47 | echo "Building New Relic layer for .NET 6, 7 and 8 (ARM64)" 48 | rm -rf $BUILD_DIR $DOTNET_DIST_ARM64 49 | mkdir -p $DIST_DIR 50 | get_agent arm64 51 | # MAKE CONFIG CHANGES HERE 52 | download_extension arm64 53 | zip -rq $DOTNET_DIST_ARM64 $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 54 | rm -rf $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 55 | echo "Build complete: ${DOTNET_DIST_ARM64}" 56 | } 57 | 58 | function publish-dotnet-arm64 { 59 | if [ ! -f $DOTNET_DIST_ARM64 ]; then 60 | echo "Package not found: ${DOTNET_DIST_ARM64}" 61 | exit 1 62 | fi 63 | 64 | for region in "${REGIONS_ARM[@]}"; do 65 | publish_layer $DOTNET_DIST_ARM64 $region dotnet arm64 $NEWRELIC_AGENT_VERSION 66 | done 67 | 68 | publish_docker_ecr $DOTNET_DIST_ARM64 dotnet arm64 69 | } 70 | 71 | # example https://download.newrelic.com/dot_net_agent/latest_release/newrelic-dotnet-agent_amd64.tar.gz 72 | function get_agent { 73 | arch=$1 74 | 75 | # Determine agent version from git tag 76 | if [[ -z "${GITHUB_REF_NAME}" ]]; then 77 | echo "Unable to determine agent version, GITHUB_REF_NAME environment variable not set." >&2 78 | exit 1; 79 | elif [[ "${GITHUB_REF_NAME}" =~ ${VERSION_REGEX} ]]; then 80 | # Extract the version number from the GITHUB_REF_NAME using regex 81 | NEWRELIC_AGENT_VERSION="${BASH_REMATCH[1]}" 82 | echo "Detected NEWRELIC_AGENT_VERSION: ${NEWRELIC_AGENT_VERSION}" 83 | else 84 | echo "Unable to determine Dotnet agent version, GITHUB_REF_NAME environment variable did not match regex. GITHUB_REF_NAME: ${GITHUB_REF_NAME}" >&2 85 | exit 1; 86 | fi 87 | 88 | url="https://download.newrelic.com/dot_net_agent/latest_release/newrelic-dotnet-agent_${NEWRELIC_AGENT_VERSION}_${arch}.tar.gz" 89 | rm -rf $AGENT_DIST_ZIP 90 | curl -L $url -o $AGENT_DIST_ZIP 91 | mkdir -p $BUILD_DIR 92 | tar -xvf $AGENT_DIST_ZIP -C ./$BUILD_DIR # under $BUILD_DIR/newrelic-dotnet-agent 93 | rm -f $AGENT_DIST_ZIP 94 | } 95 | 96 | 97 | build-dotnet-arm64 98 | publish-dotnet-arm64 99 | build-dotnet-x86-64 100 | publish-dotnet-x86-64 101 | 102 | -------------------------------------------------------------------------------- /extension/publish-layer.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -Eeuo pipefail 4 | 5 | EXTENSION_DIST_ZIP_ARM64=extension.arm64.zip 6 | EXTENSION_DIST_ZIP_X86_64=extension.x86_64.zip 7 | 8 | source ../libBuild.sh 9 | 10 | function build-layer-x86 { 11 | echo "Building New Relic Lambda Extension Layer (x86_64)" 12 | rm -f $EXTENSION_DIST_ZIP_X86_64 $EXTENSION_DIST_ZIP 13 | 14 | fetch_extension x86_64 15 | mv $EXTENSION_DIST_ZIP $EXTENSION_DIST_ZIP_X86_64 16 | echo "Build complete: ${EXTENSION_DIST_ZIP_X86_64}" 17 | } 18 | 19 | function build-layer-arm64 { 20 | echo "Building New Relic Lambda Extension Layer (arm64)" 21 | rm -f $EXTENSION_DIST_ZIP_ARM64 $EXTENSION_DIST_ZIP 22 | 23 | fetch_extension arm64 24 | mv $EXTENSION_DIST_ZIP $EXTENSION_DIST_ZIP_ARM64 25 | echo "Build complete: ${EXTENSION_DIST_ZIP_ARM64}" 26 | } 27 | 28 | function publish-layer-x86 { 29 | if [ !
-f $EXTENSION_DIST_ZIP_X86_64 ]; then 30 | echo "Package not found: ${EXTENSION_DIST_ZIP_X86_64}" 31 | exit 1 32 | fi 33 | 34 | for region in "${REGIONS_X86[@]}"; do 35 | publish_layer $EXTENSION_DIST_ZIP_X86_64 $region provided x86_64 provided 36 | done 37 | } 38 | 39 | function publish-layer-arm64 { 40 | if [ ! -f $EXTENSION_DIST_ZIP_ARM64 ]; then 41 | echo "Package not found: ${EXTENSION_DIST_ZIP_ARM64}" 42 | exit 1 43 | fi 44 | 45 | for region in "${REGIONS_ARM[@]}"; do 46 | publish_layer $EXTENSION_DIST_ZIP_ARM64 $region provided arm64 provided 47 | done 48 | } 49 | 50 | build-layer-x86 51 | publish-layer-x86 52 | publish_docker_ecr $EXTENSION_DIST_ZIP_X86_64 extension x86_64 53 | 54 | build-layer-arm64 55 | publish-layer-arm64 56 | publish_docker_ecr $EXTENSION_DIST_ZIP_ARM64 extension arm64 57 | -------------------------------------------------------------------------------- /java/.dockerignore: -------------------------------------------------------------------------------- 1 | .gradle 2 | .idea 3 | build 4 | -------------------------------------------------------------------------------- /java/.gitignore: -------------------------------------------------------------------------------- 1 | .gradle 2 | .idea 3 | build 4 | bin 5 | extensions 6 | gradlew.bat 7 | preview-extensions* 8 | -------------------------------------------------------------------------------- /java/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java-library' 3 | } 4 | 5 | repositories { 6 | mavenCentral() 7 | } 8 | 9 | dependencies { 10 | implementation("io.opentracing:opentracing-util:0.33.0") 11 | implementation("com.newrelic.opentracing:newrelic-java-lambda:2.2.4") 12 | implementation("com.newrelic.opentracing:java-aws-lambda:3.0.0") 13 | implementation("com.amazonaws:aws-lambda-java-events:3.15.0") 14 | implementation("com.amazonaws:aws-lambda-java-core:1.2.3") 15 | implementation("com.amazonaws:aws-lambda-java-serialization:1.0.0") 16 | implementation("com.fasterxml.jackson.core:jackson-databind:2.12.7.1") 17 | implementation("com.fasterxml.jackson.datatype:jackson-datatype-joda:2.12.2") 18 | implementation('commons-logging:commons-logging:1.3.4') 19 | 20 | testImplementation("junit:junit:4.13.1") 21 | testImplementation('com.github.stefanbirkner:system-lambda:1.2.0') 22 | testImplementation('org.mockito:mockito-core:3.12.4') 23 | } 24 | 25 | def extensions = copySpec { 26 | from('extensions') 27 | into('extensions') 28 | } 29 | 30 | task packageLayer(type: Zip) { 31 | archiveBaseName = 'NewRelicJavaLayer' 32 | into('java/lib') { 33 | from configurations.runtimeClasspath 34 | } 35 | into('java/lib') { 36 | from fileTree('build/libs') { 37 | include 'NewRelicJavaLayer.jar' 38 | } 39 | } 40 | with extensions 41 | } 42 | 43 | if (hasProperty('javaVersion')) { 44 | switch(project.getProperty('javaVersion')) { 45 | case "8": 46 | ext.javaVersion = JavaVersion.VERSION_1_8 47 | break 48 | case "11": 49 | ext.javaVersion = JavaVersion.VERSION_11 50 | break 51 | case "17": 52 | ext.javaVersion = JavaVersion.VERSION_17 53 | break 54 | case "21": 55 | ext.javaVersion = JavaVersion.VERSION_21 56 | break 57 | default: 58 | logger.lifecycle("Unrecognized javaVersion. 
Using 8.") 59 | ext.javaVersion = JavaVersion.VERSION_1_8 60 | } 61 | } else { 62 | ext.javaVersion = JavaVersion.VERSION_1_8 63 | } 64 | logger.lifecycle("Using javaVersion: ${ext.javaVersion}") 65 | 66 | java { 67 | sourceCompatibility = javaVersion 68 | targetCompatibility = javaVersion 69 | } 70 | 71 | test { 72 | if (javaVersion != JavaVersion.VERSION_1_8) { 73 | jvmArgs = ['--add-opens', 'java.base/java.lang=ALL-UNNAMED', '--add-opens', 'java.base/java.util=ALL-UNNAMED'] 74 | } 75 | } 76 | 77 | packageLayer.dependsOn build 78 | -------------------------------------------------------------------------------- /java/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/newrelic/newrelic-lambda-layers/eb15037c4b4f3f238c122b03cb427c1ed8588c4b/java/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /java/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip 4 | networkTimeout=10000 5 | zipStoreBase=GRADLE_USER_HOME 6 | zipStorePath=wrapper/dists 7 | -------------------------------------------------------------------------------- /java/gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 
44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | # This is normally unused 84 | # shellcheck disable=SC2034 85 | APP_BASE_NAME=${0##*/} 86 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit 87 | 88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 90 | 91 | # Use the maximum available, or set MAX_FD != -1 to use that value. 92 | MAX_FD=maximum 93 | 94 | warn () { 95 | echo "$*" 96 | } >&2 97 | 98 | die () { 99 | echo 100 | echo "$*" 101 | echo 102 | exit 1 103 | } >&2 104 | 105 | # OS specific support (must be 'true' or 'false'). 106 | cygwin=false 107 | msys=false 108 | darwin=false 109 | nonstop=false 110 | case "$( uname )" in #( 111 | CYGWIN* ) cygwin=true ;; #( 112 | Darwin* ) darwin=true ;; #( 113 | MSYS* | MINGW* ) msys=true ;; #( 114 | NONSTOP* ) nonstop=true ;; 115 | esac 116 | 117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 118 | 119 | 120 | # Determine the Java command to use to start the JVM. 121 | if [ -n "$JAVA_HOME" ] ; then 122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 123 | # IBM's JDK on AIX uses strange locations for the executables 124 | JAVACMD=$JAVA_HOME/jre/sh/java 125 | else 126 | JAVACMD=$JAVA_HOME/bin/java 127 | fi 128 | if [ ! -x "$JAVACMD" ] ; then 129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 130 | 131 | Please set the JAVA_HOME variable in your environment to match the 132 | location of your Java installation." 133 | fi 134 | else 135 | JAVACMD=java 136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | 142 | # Increase the maximum file descriptors if we can. 143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 144 | case $MAX_FD in #( 145 | max*) 146 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 
147 | # shellcheck disable=SC3045 148 | MAX_FD=$( ulimit -H -n ) || 149 | warn "Could not query maximum file descriptor limit" 150 | esac 151 | case $MAX_FD in #( 152 | '' | soft) :;; #( 153 | *) 154 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 155 | # shellcheck disable=SC3045 156 | ulimit -n "$MAX_FD" || 157 | warn "Could not set maximum file descriptor limit to $MAX_FD" 158 | esac 159 | fi 160 | 161 | # Collect all arguments for the java command, stacking in reverse order: 162 | # * args from the command line 163 | # * the main class name 164 | # * -classpath 165 | # * -D...appname settings 166 | # * --module-path (only if needed) 167 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 168 | 169 | # For Cygwin or MSYS, switch paths to Windows format before running java 170 | if "$cygwin" || "$msys" ; then 171 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 172 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 173 | 174 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 175 | 176 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 177 | for arg do 178 | if 179 | case $arg in #( 180 | -*) false ;; # don't mess with options #( 181 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 182 | [ -e "$t" ] ;; #( 183 | *) false ;; 184 | esac 185 | then 186 | arg=$( cygpath --path --ignore --mixed "$arg" ) 187 | fi 188 | # Roll the args list around exactly as many times as the number of 189 | # args, so each arg winds up back in the position where it started, but 190 | # possibly modified. 191 | # 192 | # NB: a `for` loop captures its iteration list before it begins, so 193 | # changing the positional parameters here affects neither the number of 194 | # iterations, nor the values presented in `arg`. 195 | shift # remove old arg 196 | set -- "$@" "$arg" # push replacement arg 197 | done 198 | fi 199 | 200 | # Collect all arguments for the java command; 201 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of 202 | # shell script including quotes and variable substitutions, so put them in 203 | # double quotes to make sure that they get re-expanded; and 204 | # * put everything else in single quotes, so that it's not re-expanded. 205 | 206 | set -- \ 207 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 208 | -classpath "$CLASSPATH" \ 209 | org.gradle.wrapper.GradleWrapperMain \ 210 | "$@" 211 | 212 | # Stop when "xargs" is not available. 213 | if ! command -v xargs >/dev/null 2>&1 214 | then 215 | die "xargs is not available" 216 | fi 217 | 218 | # Use "xargs" to parse quoted args. 219 | # 220 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 221 | # 222 | # In Bash we could simply go: 223 | # 224 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 225 | # set -- "${ARGS[@]}" "$@" 226 | # 227 | # but POSIX shell has neither arrays nor command substitution, so instead we 228 | # post-process each arg (as a line of input to sed) to backslash-escape any 229 | # character that might be a shell metacharacter, then use eval to reverse 230 | # that process (while maintaining the separation between arguments), and wrap 231 | # the whole thing up as a single "set" statement. 232 | # 233 | # This will of course break if any of these variables contains a newline or 234 | # an unmatched quote. 
235 | # 236 | 237 | eval "set -- $( 238 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 239 | xargs -n1 | 240 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 241 | tr '\n' ' ' 242 | )" '"$@"' 243 | 244 | exec "$JAVACMD" "$@" 245 | -------------------------------------------------------------------------------- /java/publish-layers.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -Eeuo pipefail 4 | 5 | BUILD_DIR=build 6 | GRADLE_ARCHIVE=$BUILD_DIR/distributions/NewRelicJavaLayer.zip 7 | 8 | DIST_DIR=dist 9 | JAVA8_DIST_ARM64=$DIST_DIR/java8.arm64.zip 10 | JAVA8_DIST_X86_64=$DIST_DIR/java8.x86_64.zip 11 | JAVA11_DIST_ARM64=$DIST_DIR/java11.arm64.zip 12 | JAVA11_DIST_X86_64=$DIST_DIR/java11.x86_64.zip 13 | JAVA17_DIST_ARM64=$DIST_DIR/java17.arm64.zip 14 | JAVA17_DIST_X86_64=$DIST_DIR/java17.x86_64.zip 15 | JAVA21_DIST_ARM64=$DIST_DIR/java21.arm64.zip 16 | JAVA21_DIST_X86_64=$DIST_DIR/java21.x86_64.zip 17 | 18 | source ../libBuild.sh 19 | 20 | function usage { 21 | echo "./publish-layers.sh [java8al2, java11, java17, java21]" 22 | } 23 | 24 | function build-arm() { 25 | platform=$1 26 | javaVersion=$2 27 | target=$3 28 | 29 | echo "Building New Relic layer for ${platform}" 30 | rm -rf $BUILD_DIR $target 31 | download_extension arm64 32 | ./gradlew packageLayer -P javaVersion=$javaVersion 33 | mkdir -p $DIST_DIR 34 | cp $GRADLE_ARCHIVE $target 35 | rm -rf $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 36 | echo "Build complete" 37 | } 38 | 39 | function build-x86() { 40 | platform=$1 41 | javaVersion=$2 42 | target=$3 43 | 44 | echo "Building New Relic layer for ${platform}" 45 | rm -rf $BUILD_DIR $target 46 | download_extension x86_64 47 | ./gradlew packageLayer -P javaVersion=$javaVersion 48 | mkdir -p $DIST_DIR 49 | cp $GRADLE_ARCHIVE $target 50 | rm -rf $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 51 | echo "Build complete" 52 | } 53 | 54 | function build-java8al2-arm64 { 55 | build-arm "java8.al2 (arm64)" 8 $JAVA8_DIST_ARM64 56 | } 57 | 58 | function build-java8al2-x86 { 59 | build-x86 "java8.al2 (x86_64)" 8 $JAVA8_DIST_X86_64 60 | } 61 | 62 | function publish-java8al2-arm64 { 63 | if [ ! -f $JAVA8_DIST_ARM64 ]; then 64 | echo "Package not found" 65 | exit 1 66 | fi 67 | 68 | for region in "${REGIONS_ARM[@]}"; do 69 | publish_layer $JAVA8_DIST_ARM64 $region java8.al2 arm64 70 | done 71 | } 72 | 73 | function publish-java8al2-x86 { 74 | if [ ! -f $JAVA8_DIST_X86_64 ]; then 75 | echo "Package not found" 76 | exit 1 77 | fi 78 | 79 | for region in "${REGIONS_X86[@]}"; do 80 | publish_layer $JAVA8_DIST_X86_64 $region java8.al2 x86_64 81 | done 82 | } 83 | 84 | function build-java11-arm64 { 85 | build-arm "java11 (arm64)" 11 $JAVA11_DIST_ARM64 86 | } 87 | 88 | function build-java11-x86 { 89 | build-x86 "java11 (x86_64)" 11 $JAVA11_DIST_X86_64 90 | } 91 | 92 | function publish-java11-arm64 { 93 | if [ ! -f $JAVA11_DIST_ARM64 ]; then 94 | echo "Package not found" 95 | exit 1 96 | fi 97 | 98 | for region in "${REGIONS_ARM[@]}"; do 99 | publish_layer $JAVA11_DIST_ARM64 $region java11 arm64 100 | done 101 | } 102 | 103 | function publish-java11-x86 { 104 | if [ ! 
-f $JAVA11_DIST_X86_64 ]; then 105 | echo "Package not found" 106 | exit 1 107 | fi 108 | 109 | for region in "${REGIONS_X86[@]}"; do 110 | publish_layer $JAVA11_DIST_X86_64 $region java11 x86_64 111 | done 112 | } 113 | 114 | function build-java17-arm64 { 115 | build-arm "java17 (arm64)" 17 $JAVA17_DIST_ARM64 116 | } 117 | 118 | function build-java17-x86 { 119 | build-x86 "java17 (x86_64)" 17 $JAVA17_DIST_X86_64 120 | } 121 | 122 | function publish-java17-arm64 { 123 | if [ ! -f $JAVA17_DIST_ARM64 ]; then 124 | echo "Package not found" 125 | exit 1 126 | fi 127 | 128 | for region in "${REGIONS_ARM[@]}"; do 129 | publish_layer $JAVA17_DIST_ARM64 $region java17 arm64 130 | done 131 | } 132 | 133 | function publish-java17-x86 { 134 | if [ ! -f $JAVA17_DIST_X86_64 ]; then 135 | echo "Package not found" 136 | exit 1 137 | fi 138 | 139 | for region in "${REGIONS_X86[@]}"; do 140 | publish_layer $JAVA17_DIST_X86_64 $region java17 x86_64 141 | done 142 | } 143 | 144 | function build-java21-arm64 { 145 | build-arm "java21 (arm64)" 21 $JAVA21_DIST_ARM64 146 | } 147 | 148 | function build-java21-x86 { 149 | build-x86 "java21 (x86_64)" 21 $JAVA21_DIST_X86_64 150 | } 151 | 152 | function publish-java21-arm64 { 153 | if [ ! -f $JAVA21_DIST_ARM64 ]; then 154 | echo "Package not found" 155 | exit 1 156 | fi 157 | 158 | for region in "${REGIONS_ARM[@]}"; do 159 | publish_layer $JAVA21_DIST_ARM64 $region java21 arm64 160 | done 161 | } 162 | 163 | function publish-java21-x86 { 164 | if [ ! -f $JAVA21_DIST_X86_64 ]; then 165 | echo "Package not found" 166 | exit 1 167 | fi 168 | 169 | for region in "${REGIONS_X86[@]}"; do 170 | publish_layer $JAVA21_DIST_X86_64 $region java21 x86_64 171 | done 172 | } 173 | 174 | case "$1" in 175 | "build-java8al2") 176 | build-java8al2-arm64 177 | build-java8al2-x86 178 | ;; 179 | "publish-java8al2") 180 | publish-java8al2-arm64 181 | publish-java8al2-x86 182 | ;; 183 | "build-java11") 184 | build-java11-arm64 185 | build-java11-x86 186 | ;; 187 | "publish-java11") 188 | publish-java11-arm64 189 | publish-java11-x86 190 | ;; 191 | "build-java17") 192 | build-java17-arm64 193 | build-java17-x86 194 | ;; 195 | "publish-java17") 196 | publish-java17-arm64 197 | publish-java17-x86 198 | ;; 199 | "build-java21") 200 | build-java21-arm64 201 | build-java21-x86 202 | ;; 203 | "publish-java21") 204 | publish-java21-arm64 205 | publish-java21-x86 206 | ;; 207 | "build-publish-java8al2-ecr-image") 208 | build-java8al2-arm64 209 | publish_docker_ecr $JAVA8_DIST_ARM64 java8 arm64 210 | build-java8al2-x86 211 | publish_docker_ecr $JAVA8_DIST_X86_64 java8 x86_64 212 | ;; 213 | "build-publish-java11-ecr-image") 214 | build-java11-arm64 215 | publish_docker_ecr $JAVA11_DIST_ARM64 java11 arm64 216 | build-java11-x86 217 | publish_docker_ecr $JAVA11_DIST_X86_64 java11 x86_64 218 | ;; 219 | "build-publish-java17-ecr-image") 220 | build-java17-arm64 221 | publish_docker_ecr $JAVA17_DIST_ARM64 java17 arm64 222 | build-java17-x86 223 | publish_docker_ecr $JAVA17_DIST_X86_64 java17 x86_64 224 | ;; 225 | "build-publish-java21-ecr-image") 226 | build-java21-arm64 227 | publish_docker_ecr $JAVA21_DIST_ARM64 java21 arm64 228 | build-java21-x86 229 | publish_docker_ecr $JAVA21_DIST_X86_64 java21 x86_64 230 | ;; 231 | "java8al2") 232 | $0 build-java8al2 233 | $0 publish-java8al2 234 | ;; 235 | "java11") 236 | $0 build-java11 237 | $0 publish-java11 238 | ;; 239 | "java17") 240 | $0 build-java17 241 | $0 publish-java17 242 | ;; 243 | "java21") 244 | $0 build-java21 245 | $0 publish-java21 246 | ;; 247 
| *) 248 | usage 249 | ;; 250 | esac 251 | -------------------------------------------------------------------------------- /java/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'NewRelicJavaLayer' 2 | -------------------------------------------------------------------------------- /java/src/main/java/com/newrelic/java/HandlerWrapper.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | import com.amazonaws.services.lambda.runtime.RequestHandler; 5 | import com.amazonaws.services.lambda.runtime.RequestStreamHandler; 6 | import com.newrelic.opentracing.LambdaTracer; 7 | import com.newrelic.opentracing.aws.LambdaTracing; 8 | import com.newrelic.opentracing.aws.StreamLambdaTracing; 9 | import io.opentracing.Tracer; 10 | import io.opentracing.util.GlobalTracer; 11 | 12 | import java.io.IOException; 13 | import java.io.InputStream; 14 | import java.io.OutputStream; 15 | 16 | public class HandlerWrapper { 17 | 18 | public static final String HANDLER_ENV_VAR = "NEW_RELIC_LAMBDA_HANDLER"; 19 | 20 | private static RequestHandler requestHandler; 21 | private static RequestStreamHandler requestStreamHandler; 22 | 23 | static { 24 | // Obtain an instance of the OpenTracing Tracer of your choice 25 | Tracer tracer = LambdaTracer.INSTANCE; 26 | // Register your tracer as the Global Tracer 27 | GlobalTracer.registerIfAbsent(tracer); 28 | 29 | // Set up handlers 30 | setupHandlers(); 31 | } 32 | 33 | static void setupHandlers() { 34 | String handler = System.getenv(HANDLER_ENV_VAR); 35 | String[] parts = handler.split("::"); 36 | String handlerClass = parts[0]; 37 | String handlerMethod = parts.length == 2 ? parts[1] : "handleRequest"; 38 | 39 | try { 40 | Class loadedClass = JavaClassLoader.class.getClassLoader().loadClass(handlerClass); 41 | 42 | boolean isRequestStreamHandler = RequestStreamHandler.class.isAssignableFrom(loadedClass); 43 | requestHandler = isRequestStreamHandler 44 | ? (input, context) -> { 45 | throw new IllegalStateException("" + handlerClass + " is RequestStreamHandler, use handleStreamsRequest instead"); 46 | } 47 | : JavaClassLoader.initializeRequestHandler(loadedClass, handlerMethod); 48 | requestStreamHandler = isRequestStreamHandler 49 | ? 
(RequestStreamHandler) loadedClass.getDeclaredConstructor().newInstance() 50 | : (input, output, context) -> { 51 | throw new IllegalStateException("" + handlerClass + " is not RequestStreamHandler, use handleRequest instead"); 52 | }; 53 | 54 | } catch (ReflectiveOperationException e) { 55 | throw new RuntimeException("Error occurred during initialization of javaClassLoader:", e); 56 | } 57 | } 58 | 59 | public Object handleRequest(Object input, Context context) { 60 | return LambdaTracing.instrument( 61 | input, 62 | context, 63 | requestHandler::handleRequest 64 | ); 65 | } 66 | 67 | public void handleStreamsRequest(InputStream input, OutputStream output, Context context) throws IOException { 68 | StreamLambdaTracing.instrument( 69 | input, 70 | output, 71 | context, 72 | requestStreamHandler 73 | ); 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /java/src/main/java/com/newrelic/java/JavaClassLoader.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | import com.amazonaws.services.lambda.runtime.RequestHandler; 5 | import com.amazonaws.services.lambda.runtime.serialization.PojoSerializer; 6 | import com.amazonaws.services.lambda.runtime.serialization.events.LambdaEventSerializers; 7 | import com.fasterxml.jackson.core.JsonProcessingException; 8 | import com.fasterxml.jackson.databind.DeserializationFeature; 9 | import com.fasterxml.jackson.databind.MapperFeature; 10 | import com.fasterxml.jackson.databind.ObjectMapper; 11 | import com.fasterxml.jackson.datatype.joda.JodaModule; 12 | 13 | import java.lang.invoke.MethodHandle; 14 | import java.lang.invoke.MethodHandles; 15 | import java.lang.invoke.MethodType; 16 | import java.lang.reflect.Method; 17 | 18 | public class JavaClassLoader implements RequestHandler { 19 | 20 | private interface UnsafeHandler { 21 | 22 | Object handle(Object input, Context context) throws Throwable; 23 | 24 | } 25 | 26 | private final ObjectMapper mapper = new ObjectMapper().configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true) 27 | .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) 28 | .enable(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS) 29 | .registerModule(new JodaModule()); 30 | 31 | private final Class inputType; 32 | private final UnsafeHandler executor; 33 | 34 | static JavaClassLoader initializeRequestHandler(Class loadedClass, String methodName) throws ReflectiveOperationException { 35 | Class methodReturnType = Object.class; 36 | Class methodInputType = null; 37 | Class methodContextType = null; 38 | int numberOfArguments = 0; 39 | for (Method method : loadedClass.getMethods()) { 40 | if (isUserHandlerMethod(method, methodName, loadedClass)) { 41 | methodReturnType = method.getReturnType(); 42 | numberOfArguments = method.getParameterTypes().length; 43 | if (numberOfArguments == 1) { 44 | methodInputType = method.getParameterTypes()[0]; 45 | } else if (numberOfArguments == 2) { 46 | methodInputType = method.getParameterTypes()[0]; 47 | methodContextType = method.getParameterTypes()[1]; 48 | } 49 | break; 50 | } 51 | } 52 | 53 | Object classInstance = loadedClass.getDeclaredConstructor().newInstance(); 54 | 55 | MethodHandle methodHandle = MethodHandles.publicLookup().findVirtual( 56 | loadedClass, 57 | methodName, 58 | getMethodType(methodReturnType, methodInputType, methodContextType) 59 | ).bindTo(classInstance); 60 | 
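// The MethodHandle is now bound to a fresh instance of the user's handler class; wrap it, together with the detected input type and parameter count, so handleRequest can coerce the incoming payload before dispatching to the user method.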
61 | return new JavaClassLoader(methodInputType, methodHandle, numberOfArguments); 62 | } 63 | 64 | private static MethodType getMethodType(Class methodReturnType, Class methodInputType, Class methodContextType) { 65 | if (methodInputType == null) { 66 | return MethodType.methodType(methodReturnType); 67 | } else if (methodContextType == null) { 68 | return MethodType.methodType(methodReturnType, methodInputType); 69 | } 70 | return MethodType.methodType(methodReturnType, methodInputType, methodContextType); 71 | } 72 | 73 | // RequestHandler implementation constructor 74 | private JavaClassLoader(Class inputType, MethodHandle methodHandle, int numberOfArguments) { 75 | this.inputType = inputType; 76 | if (numberOfArguments == 0) { 77 | this.executor = (input, context) -> methodHandle.invoke(); 78 | } else if (numberOfArguments == 1) { 79 | this.executor = (input, context) -> methodHandle.invokeWithArguments(input); 80 | } else { 81 | this.executor = methodHandle::invokeWithArguments; 82 | } 83 | } 84 | 85 | @Override 86 | public Object handleRequest(Object inputParam, Context contextParam) { 87 | try { 88 | return executor.handle(mappingInputToHandlerType(inputParam, inputType), contextParam); 89 | } catch (Throwable e) { 90 | throw new RuntimeException("Error occurred while invoking handler method: " + e, e); 91 | } 92 | } 93 | 94 | private static boolean isUserHandlerMethod(Method method, String methodName, Class loadedClass) { 95 | if (!method.getDeclaringClass().isAssignableFrom(loadedClass)) { 96 | return false; 97 | } 98 | 99 | if (!method.getName().equals(methodName)) { 100 | return false; 101 | } 102 | 103 | if (method.isBridge() || method.isSynthetic()) { 104 | return false; 105 | } 106 | 107 | if (method.getParameterTypes().length <= 1) { 108 | return true; 109 | } 110 | 111 | return method.getParameterTypes().length == 2 && ( 112 | method.getParameterTypes()[1].isAssignableFrom(Context.class) || Object.class.equals(method.getParameterTypes()[1]) 113 | ); 114 | } 115 | 116 | private Object mappingInputToHandlerType(Object inputParam, Class inputType) throws JsonProcessingException { 117 | if (inputType == null || inputType.isAssignableFrom(Number.class) || inputType.isAssignableFrom(String.class)) { 118 | return inputParam; 119 | } else if (LambdaEventSerializers.isLambdaSupportedEvent(inputType.getName())) { 120 | PojoSerializer serializer = LambdaEventSerializers.serializerFor(inputType, JavaClassLoader.class.getClassLoader()); 121 | String inputParamString = mapper.writeValueAsString(inputParam); 122 | return serializer.fromJson(inputParamString); 123 | } 124 | return inputParam instanceof CharSequence ? 
mapper.readValue(inputParam.toString(), inputType) : mapper.convertValue(inputParam, inputType); 125 | } 126 | } 127 | 128 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/BiFunctionHandlerWithObjectInput.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | 5 | import java.util.function.BiFunction; 6 | 7 | public class BiFunctionHandlerWithObjectInput implements BiFunction { 8 | 9 | public static final String RESPONSE_PREFIX = "Hello "; 10 | 11 | @Override 12 | public String apply(Input s, Context context) { 13 | return RESPONSE_PREFIX + s.getMessage(); 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/BiFunctionHandlerWithStringInput.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | 5 | import java.util.function.BiFunction; 6 | 7 | public class BiFunctionHandlerWithStringInput implements BiFunction { 8 | 9 | public static final String RESPONSE_PREFIX = "Hello "; 10 | 11 | @Override 12 | public String apply(String s, Context context) { 13 | return RESPONSE_PREFIX + s; 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/FunctionHandlerWithObjectInput.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import java.util.function.Function; 4 | 5 | public class FunctionHandlerWithObjectInput implements Function { 6 | 7 | public static final String RESPONSE_PREFIX = "Hello "; 8 | 9 | @Override 10 | public String apply(Input s) { 11 | return RESPONSE_PREFIX + s.getMessage(); 12 | } 13 | 14 | } 15 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/FunctionHandlerWithStringInput.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import java.util.function.Function; 4 | 5 | public class FunctionHandlerWithStringInput implements Function { 6 | 7 | public static final String RESPONSE_PREFIX = "Hello "; 8 | 9 | @Override 10 | public String apply(String s) { 11 | return RESPONSE_PREFIX + s; 12 | } 13 | 14 | } 15 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/HandlerWrapperTest.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | import com.github.stefanbirkner.systemlambda.SystemLambda; 5 | import org.junit.Test; 6 | import org.mockito.Mockito; 7 | 8 | import java.io.InputStream; 9 | import java.io.OutputStream; 10 | 11 | import static org.junit.Assert.assertEquals; 12 | import static org.junit.Assert.assertThrows; 13 | 14 | public class HandlerWrapperTest { 15 | 16 | @Test 17 | public void testRequestHandler() throws Exception { 18 | SystemLambda.withEnvironmentVariable(HandlerWrapper.HANDLER_ENV_VAR, RequestHandlerWithObjectInput.class.getName()) 19 | .execute(() -> { 20 | HandlerWrapper.setupHandlers(); 21 | assertEquals("Hello World", new 
HandlerWrapper().handleRequest(Input.create("World"), Mockito.mock(Context.class))); 22 | }); 23 | } 24 | 25 | @Test 26 | public void testRequestWrongHandler() throws Exception { 27 | SystemLambda.withEnvironmentVariable(HandlerWrapper.HANDLER_ENV_VAR, RequestHandlerWithObjectInput.class.getName()) 28 | .execute(() -> { 29 | HandlerWrapper.setupHandlers(); 30 | assertThrows(RuntimeException.class, () -> 31 | new HandlerWrapper().handleStreamsRequest(Mockito.mock(InputStream.class), Mockito.mock(OutputStream.class), Mockito.mock(Context.class)) 32 | ); 33 | }); 34 | } 35 | 36 | @Test 37 | public void testRequestStreamHandler() throws Exception { 38 | SystemLambda.withEnvironmentVariable(HandlerWrapper.HANDLER_ENV_VAR, TestStreamingRequestHandler.class.getName()) 39 | .execute(() -> { 40 | HandlerWrapper.setupHandlers(); 41 | new HandlerWrapper().handleStreamsRequest(Mockito.mock(InputStream.class), Mockito.mock(OutputStream.class), Mockito.mock(Context.class)); 42 | }); 43 | } 44 | 45 | @Test 46 | public void testRequestWrongStreamHandler() throws Exception { 47 | SystemLambda.withEnvironmentVariable(HandlerWrapper.HANDLER_ENV_VAR, TestStreamingRequestHandler.class.getName()) 48 | .execute(() -> { 49 | HandlerWrapper.setupHandlers(); 50 | assertThrows(RuntimeException.class, () -> new HandlerWrapper().handleRequest(Input.create("World"), Mockito.mock(Context.class))); 51 | }); 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/Input.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | public class Input { 4 | 5 | public static Input create(String message) { 6 | Input input = new Input(); 7 | input.setMessage(message); 8 | return input; 9 | } 10 | 11 | private String message; 12 | 13 | public String getMessage() { 14 | return message; 15 | } 16 | 17 | public void setMessage(String message) { 18 | this.message = message; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/JavaClassLoaderTest.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | 6 | public class JavaClassLoaderTest { 7 | 8 | private static final String INPUT_AS_STRING = "{\"message\": \"World\"}"; 9 | private static final Input INPUT_AS_OBJECT = Input.create("World"); 10 | public static final String STRING_INPUT = "World"; 11 | 12 | @Test 13 | public void testFunctionWithObjectHandler() throws ReflectiveOperationException { 14 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(FunctionHandlerWithObjectInput.class, "apply"); 15 | Assert.assertEquals("Hello World", loader.handleRequest(INPUT_AS_OBJECT, null)); 16 | } 17 | 18 | @Test 19 | public void testFunctionWithObjectHandlerPassedAsJson() throws ReflectiveOperationException { 20 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(FunctionHandlerWithObjectInput.class, "apply"); 21 | Assert.assertEquals("Hello World", loader.handleRequest(INPUT_AS_STRING, null)); 22 | } 23 | 24 | @Test 25 | public void testFunctionWithStringHandler() throws ReflectiveOperationException { 26 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(FunctionHandlerWithStringInput.class, "apply"); 27 | Assert.assertEquals("Hello World", loader.handleRequest(STRING_INPUT, null)); 28 
| } 29 | 30 | @Test 31 | public void testBiFunctionWithObjectHandler() throws ReflectiveOperationException { 32 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(BiFunctionHandlerWithObjectInput.class, "apply"); 33 | Assert.assertEquals("Hello World", loader.handleRequest(INPUT_AS_OBJECT, null)); 34 | } 35 | 36 | @Test 37 | public void testBiFunctionWithStringHandler() throws ReflectiveOperationException { 38 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(BiFunctionHandlerWithStringInput.class, "apply"); 39 | Assert.assertEquals("Hello World", loader.handleRequest(STRING_INPUT, null)); 40 | } 41 | 42 | @Test 43 | public void testWithObjectRequestHandler() throws ReflectiveOperationException { 44 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(RequestHandlerWithObjectInput.class, "handleRequest"); 45 | Assert.assertEquals("Hello World", loader.handleRequest(INPUT_AS_OBJECT, null)); 46 | } 47 | 48 | @Test 49 | public void testWithStringRequestHandler() throws ReflectiveOperationException { 50 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(RequestHandlerWithStringInput.class, "handleRequest"); 51 | Assert.assertEquals("Hello World", loader.handleRequest(STRING_INPUT, null)); 52 | } 53 | 54 | @Test 55 | public void testWithoutArgumentRequestHandler() throws ReflectiveOperationException { 56 | JavaClassLoader loader = JavaClassLoader.initializeRequestHandler(PojoHandlerWithNoArgument.class, "handleRequest"); 57 | Assert.assertEquals("Hello World!", loader.handleRequest(INPUT_AS_OBJECT, null)); 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/PojoHandlerWithNoArgument.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | public class PojoHandlerWithNoArgument { 4 | 5 | 6 | public String handleRequest() { 7 | return "Hello World!"; 8 | } 9 | 10 | } 11 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/RequestHandlerWithObjectInput.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | import com.amazonaws.services.lambda.runtime.RequestHandler; 5 | 6 | public class RequestHandlerWithObjectInput implements RequestHandler { 7 | 8 | public static final String RESPONSE_PREFIX = "Hello "; 9 | 10 | @Override 11 | public String handleRequest(Input s, Context context) { 12 | return RESPONSE_PREFIX + s.getMessage(); 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/RequestHandlerWithStringInput.java: -------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | import com.amazonaws.services.lambda.runtime.RequestHandler; 5 | 6 | public class RequestHandlerWithStringInput implements RequestHandler { 7 | 8 | public static final String RESPONSE_PREFIX = "Hello "; 9 | 10 | @Override 11 | public String handleRequest(String s, Context context) { 12 | return RESPONSE_PREFIX + s; 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /java/src/test/java/com/newrelic/java/TestStreamingRequestHandler.java: 
-------------------------------------------------------------------------------- 1 | package com.newrelic.java; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | import com.amazonaws.services.lambda.runtime.RequestStreamHandler; 5 | 6 | import java.io.IOException; 7 | import java.io.InputStream; 8 | import java.io.OutputStream; 9 | 10 | public class TestStreamingRequestHandler implements RequestStreamHandler { 11 | @Override 12 | public void handleRequest(InputStream input, OutputStream output, Context context) { 13 | // do nothing 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /nodejs/.c8rc.json: -------------------------------------------------------------------------------- 1 | { 2 | "reporter": ["lcov"], 3 | "all": true, 4 | "exclude": ["test/*", "coverage/*"] 5 | } -------------------------------------------------------------------------------- /nodejs/.eslintignore: -------------------------------------------------------------------------------- 1 | **/node_modules/** -------------------------------------------------------------------------------- /nodejs/.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es6": true, 4 | "node": true, 5 | "browser": false 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 12 9 | }, 10 | "rules": { 11 | "indent": ["warn", 2, {"SwitchCase": 1}], 12 | "brace-style": "error", 13 | "comma-dangle": "off", 14 | "comma-style": ["error", "last"], 15 | "consistent-return": "off", 16 | "curly": "off", 17 | "eol-last": "error", 18 | "eqeqeq": ["error", "smart"], 19 | "camelcase": ["off", {"properties": "never"}], 20 | "dot-notation": "error", 21 | "func-names": "error", 22 | "guard-for-in": "error", 23 | "key-spacing": ["off", { "beforeColon": false }], 24 | "max-len": ["error", 120, { "ignoreUrls": true }], 25 | "max-nested-callbacks": ["error", 3], 26 | "max-params": ["error", 5], 27 | "new-cap": "error", 28 | "no-console": "warn", 29 | "no-debugger": "error", 30 | "no-else-return": "error", 31 | "no-floating-decimal": "error", 32 | "no-lonely-if": "error", 33 | "no-mixed-requires": "error", 34 | "no-multiple-empty-lines": "error", 35 | "no-multi-spaces": ["off", { "ignoreEOLComments": true }], 36 | "no-new": "error", 37 | "no-new-func": "warn", 38 | "no-shadow": ["warn", {"allow": ["shim", "t"]}], 39 | "no-undef": "error", 40 | "no-unused-vars": "error", 41 | "no-use-before-define": ["off", {"functions": false}], 42 | "one-var": ["off", "never"], 43 | "padded-blocks": ["error", "never"], 44 | "radix": "error", 45 | "semi": ["error", "never"], 46 | "space-before-function-paren": ["error", "never"], 47 | "keyword-spacing": "error", 48 | "space-before-blocks": "error", 49 | "space-infix-ops": "error", 50 | "spaced-comment": "error", 51 | "space-unary-ops": "error", 52 | "strict": "error", 53 | "quote-props": [ "off", "consistent-as-needed" ], 54 | "quotes": ["off", "single"], 55 | "wrap-iife": "error" 56 | }, 57 | "overrides": [ 58 | { 59 | "files": ["test/*.tap.js"], 60 | "rules": { 61 | "no-shadow": ["warn", { "allow": ["t"] }] 62 | } 63 | }, 64 | { 65 | "files": ["test/integration/*.tap.js", "test/integration/*/*.tap.js", "test/integration/core/exec-me.js"], 66 | "rules": { 67 | "no-console": ["off"] 68 | } 69 | } 70 | ] 71 | } 72 | -------------------------------------------------------------------------------- /nodejs/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | 
logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (https://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # TypeScript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | # next.js build output 61 | .next 62 | 63 | dist/ 64 | .env 65 | /.idea/.gitignore 66 | /.idea/mainland-layer.iml 67 | /.idea/misc.xml 68 | /.idea/modules.xml 69 | /.idea/vcs.xml 70 | -------------------------------------------------------------------------------- /nodejs/.npmrc-ci: -------------------------------------------------------------------------------- 1 | //registry.npmjs.org/:_authToken=${NPM_TOKEN} 2 | -------------------------------------------------------------------------------- /nodejs/esm.mjs: -------------------------------------------------------------------------------- 1 | import newrelic from 'newrelic' 2 | import fs from 'node:fs' 3 | import path from 'node:path' 4 | 5 | process.env.NEW_RELIC_APP_NAME = process.env.NEW_RELIC_APP_NAME || process.env.AWS_LAMBDA_FUNCTION_NAME 6 | process.env.NEW_RELIC_DISTRIBUTED_TRACING_ENABLED = process.env.NEW_RELIC_DISTRIBUTED_TRACING_ENABLED || 'true' 7 | process.env.NEW_RELIC_NO_CONFIG_FILE = process.env.NEW_RELIC_NO_CONFIG_FILE || 'true' 8 | process.env.NEW_RELIC_TRUSTED_ACCOUNT_KEY = 9 | process.env.NEW_RELIC_TRUSTED_ACCOUNT_KEY || process.env.NEW_RELIC_ACCOUNT_ID 10 | 11 | if (process.env.LAMBDA_TASK_ROOT && typeof process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED !== 'undefined') { 12 | delete process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED 13 | } 14 | function getNestedHandler(object, nestedProperty) { 15 | return nestedProperty.split('.').reduce((nested, key) => { 16 | return nested && nested[key] 17 | }, object) 18 | } 19 | function getHandlerPath() { 20 | let handler 21 | const { NEW_RELIC_LAMBDA_HANDLER } = process.env 22 | 23 | if (!NEW_RELIC_LAMBDA_HANDLER) { 24 | throw new Error('No NEW_RELIC_LAMBDA_HANDLER environment variable set.') 25 | } else { 26 | handler = NEW_RELIC_LAMBDA_HANDLER 27 | } 28 | 29 | const parts = handler.split('.') 30 | 31 | if (parts.length < 2) { 32 | throw new Error( 33 | `Improperly formatted handler environment variable: ${handler}` 34 | ) 35 | } 36 | 37 | const lastSlashIndex = handler.lastIndexOf('/') + 1 38 | const firstDotAfterSlash = handler.indexOf('.', lastSlashIndex) 39 | const moduleToImport = handler.slice(0, firstDotAfterSlash) 40 | const handlerToWrap = handler.slice(firstDotAfterSlash + 1) 41 | 42 | return {moduleToImport, handlerToWrap} 43 | } 44 | 45 | function handleRequireImportError(e, moduleToImport) { 46 | if (e.code === 'MODULE_NOT_FOUND') { 47 | return 
new Error(`Unable to import module '${moduleToImport}'`) 48 | } 49 | return e 50 | } 51 | 52 | function getFullyQualifiedModulePath(modulePath, extensions) { 53 | let fullModulePath 54 | 55 | extensions.forEach((extension) => { 56 | const filePath = modulePath + extension 57 | if (fs.existsSync(filePath)) { 58 | fullModulePath = filePath 59 | return 60 | } 61 | }) 62 | 63 | if (!fullModulePath) { 64 | throw new Error( 65 | `Unable to resolve module file at ${modulePath} with the following extensions: ${extensions.join(',')}` 66 | ) 67 | } 68 | 69 | return fullModulePath 70 | } 71 | 72 | async function getModuleWithImport(appRoot, moduleToImport) { 73 | const modulePath = path.resolve(appRoot, moduleToImport) 74 | const validExtensions = ['.mjs', '.js'] 75 | const fullModulePath = getFullyQualifiedModulePath(modulePath, validExtensions) 76 | 77 | try { 78 | return await import(fullModulePath) 79 | } catch (err) { 80 | throw handleRequireImportError(err, moduleToImport) 81 | } 82 | } 83 | 84 | function validateHandlerDefinition(userHandler, handlerName, moduleName) { 85 | if (typeof userHandler === 'undefined') { 86 | throw new Error( 87 | `Handler '${handlerName}' missing on module '${moduleName}'` 88 | ) 89 | } 90 | 91 | if (typeof userHandler !== 'function') { 92 | throw new Error( 93 | `Handler '${handlerName}' from '${moduleName}' is not a function` 94 | ) 95 | } 96 | } 97 | 98 | const { LAMBDA_TASK_ROOT = '.' } = process.env 99 | const { moduleToImport, handlerToWrap } = getHandlerPath() 100 | 101 | const userHandler = await getHandler() 102 | const handler = newrelic.setLambdaHandler(userHandler) 103 | 104 | async function getHandler() { 105 | const userModule = await getModuleWithImport(LAMBDA_TASK_ROOT, moduleToImport) 106 | const userHandler = getNestedHandler(userModule, handlerToWrap) 107 | validateHandlerDefinition(userHandler, handlerToWrap, moduleToImport) 108 | 109 | return userHandler 110 | } 111 | 112 | export { handler, getHandlerPath } 113 | 114 | -------------------------------------------------------------------------------- /nodejs/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | process.env.NEW_RELIC_APP_NAME = process.env.NEW_RELIC_APP_NAME || process.env.AWS_LAMBDA_FUNCTION_NAME 4 | process.env.NEW_RELIC_DISTRIBUTED_TRACING_ENABLED = process.env.NEW_RELIC_DISTRIBUTED_TRACING_ENABLED || 'true' 5 | process.env.NEW_RELIC_NO_CONFIG_FILE = process.env.NEW_RELIC_NO_CONFIG_FILE || 'true' 6 | process.env.NEW_RELIC_TRUSTED_ACCOUNT_KEY = 7 | process.env.NEW_RELIC_TRUSTED_ACCOUNT_KEY || process.env.NEW_RELIC_ACCOUNT_ID 8 | 9 | if (process.env.LAMBDA_TASK_ROOT && typeof process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED !== 'undefined') { 10 | delete process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED 11 | } 12 | 13 | const newrelic = require('newrelic') 14 | const fs = require('node:fs') 15 | const path = require('node:path') 16 | 17 | function getHandlerPath() { 18 | let handler 19 | const { NEW_RELIC_LAMBDA_HANDLER } = process.env 20 | 21 | if (!NEW_RELIC_LAMBDA_HANDLER) { 22 | throw new Error('No NEW_RELIC_LAMBDA_HANDLER environment variable set.') 23 | } else { 24 | handler = NEW_RELIC_LAMBDA_HANDLER 25 | } 26 | 27 | const parts = handler.split('.') 28 | 29 | if (parts.length < 2) { 30 | throw new Error( 31 | `Improperly formatted handler environment variable: ${handler}` 32 | ) 33 | } 34 | 35 | const handlerToWrap = parts[parts.length - 1] 36 | const moduleToImport = handler.slice(0, handler.lastIndexOf('.')) 37 | 
return { moduleToImport, handlerToWrap } 38 | } 39 | 40 | function handleRequireImportError(e, moduleToImport) { 41 | if (e.code === 'MODULE_NOT_FOUND') { 42 | return new Error(`Unable to import module '${moduleToImport}'`) 43 | } 44 | return e 45 | } 46 | 47 | function getFullyQualifiedModulePath(modulePath, extensions) { 48 | let fullModulePath 49 | 50 | extensions.forEach((extension) => { 51 | const filePath = modulePath + extension 52 | if (fs.existsSync(filePath)) { 53 | fullModulePath = filePath 54 | return 55 | } 56 | }) 57 | 58 | if (!fullModulePath) { 59 | throw new Error( 60 | `Unable to resolve module file at ${modulePath} with the following extensions: ${extensions.join(',')}` 61 | ) 62 | } 63 | 64 | return fullModulePath 65 | } 66 | 67 | async function getModuleWithImport(appRoot, moduleToImport) { 68 | const modulePath = path.resolve(appRoot, moduleToImport) 69 | const validExtensions = ['.mjs', '.js'] 70 | const fullModulePath = getFullyQualifiedModulePath(modulePath, validExtensions) 71 | 72 | try { 73 | return await import(fullModulePath) 74 | } catch (err) { 75 | throw handleRequireImportError(err, moduleToImport) 76 | } 77 | } 78 | 79 | function getModuleWithRequire(appRoot, moduleToImport) { 80 | const modulePath = path.resolve(appRoot, moduleToImport) 81 | const validExtensions = ['.cjs', '.js'] 82 | const fullModulePath = getFullyQualifiedModulePath(modulePath, validExtensions) 83 | 84 | try { 85 | return require(fullModulePath) 86 | } catch (err) { 87 | throw handleRequireImportError(err, moduleToImport) 88 | } 89 | } 90 | 91 | function validateHandlerDefinition(userHandler, handlerName, moduleName) { 92 | if (typeof userHandler === 'undefined') { 93 | throw new Error( 94 | `Handler '${handlerName}' missing on module '${moduleName}'` 95 | ) 96 | } 97 | 98 | if (typeof userHandler !== 'function') { 99 | throw new Error( 100 | `Handler '${handlerName}' from '${moduleName}' is not a function` 101 | ) 102 | } 103 | } 104 | 105 | let wrappedHandler 106 | let patchedHandlerPromise 107 | 108 | const { LAMBDA_TASK_ROOT = '.' 
} = process.env 109 | const { moduleToImport, handlerToWrap } = getHandlerPath() 110 | 111 | if (process.env.NEW_RELIC_USE_ESM === 'true') { 112 | patchedHandlerPromise = getHandler().then(userHandler => { 113 | return newrelic.setLambdaHandler(userHandler) 114 | }) 115 | } else { 116 | wrappedHandler = newrelic.setLambdaHandler(getHandlerSync()) 117 | } 118 | 119 | async function getHandler() { 120 | const userHandler = (await getModuleWithImport(LAMBDA_TASK_ROOT, moduleToImport))[handlerToWrap] 121 | validateHandlerDefinition(userHandler, handlerToWrap, moduleToImport) 122 | 123 | return userHandler 124 | } 125 | 126 | function getHandlerSync() { 127 | const userHandler = getModuleWithRequire(LAMBDA_TASK_ROOT, moduleToImport)[handlerToWrap] 128 | validateHandlerDefinition(userHandler, handlerToWrap, moduleToImport) 129 | 130 | return userHandler 131 | } 132 | 133 | async function patchHandler() { 134 | const args = Array.prototype.slice.call(arguments) 135 | return patchedHandlerPromise 136 | .then(_wrappedHandler => _wrappedHandler.apply(this, args)) 137 | } 138 | 139 | let handler 140 | 141 | if (process.env.NEW_RELIC_USE_ESM === 'true') { 142 | handler = patchHandler 143 | } else { 144 | handler = wrappedHandler 145 | } 146 | 147 | 148 | module.exports = { 149 | handler, 150 | getHandlerPath 151 | } 152 | -------------------------------------------------------------------------------- /nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "newrelic-lambda-layers", 3 | "version": "11.5.0", 4 | "description": "New Relic Installer for serverless APM layers.", 5 | "main": "", 6 | "files": [ 7 | "../dist", 8 | "package.json", 9 | "README.md" 10 | ], 11 | "scripts": { 12 | "build": "npm install && mkdir -p node_modules/newrelic-lambda-wrapper && cp index.js node_modules/newrelic-lambda-wrapper && mkdir -p nodejs && cp -r node_modules nodejs && zip -rq ../dist/nodejs/NewRelicLayer.zip nodejs && rm -rf ./nodejs", 13 | "clean": "rm ../dist/nodejs/NewRelicLayer.zip", 14 | "lint": "eslint ./*.js test", 15 | "lint:fix": "eslint --fix ./*.js test", 16 | "test": "npm run lint && npm run test:unit && npm run test:integration", 17 | "test:unit": "rm -f newrelic_agent.log && c8 -o ./coverage/unit tap --test-regex='(\\/|^test\\/unit\\/.*\\.tap\\.js)$' --timeout=180 --no-coverage --reporter classic", 18 | "test:integration": "npm run test:integration:cjs && npm run test:integration:esm && npm run test:integration:legacy-esm", 19 | "test:integration:cjs": "HANDLER=../../index.handler NEW_RELIC_USE_ESM=false MODULE_TYPE=cjs tap --timeout=180 --no-coverage --reporter classic test/integration/cjs/*.tap.js", 20 | "test:integration:legacy-esm": "HANDLER=../../index.handler NEW_RELIC_USE_ESM=true MODULE_TYPE=esm tap --timeout=180 --no-coverage --reporter classic test/integration/esm/*.tap.js", 21 | "test:integration:esm": "NEW_RELIC_USE_ESM=false HANDLER=../../esm.handler MODULE_TYPE=esm tap --timeout=180 --no-coverage --reporter classic test/integration/esm/*.tap.js" 22 | }, 23 | "repository": { 24 | "type": "git", 25 | "url": "git+https://github.com/newrelic/newrelic-lambda-layers.git" 26 | }, 27 | "author": "newrelic", 28 | "license": "Apache-2.0", 29 | "bugs": { 30 | "url": "https://github.com/newrelic/newrelic-lambda-layers/issues" 31 | }, 32 | "homepage": "https://github.com/newrelic/newrelic-lambda-layers#readme", 33 | "dependencies": { 34 | "newrelic": "^12.9.0" 35 | }, 36 | "keywords": [ 37 | "lambda", 38 | "serverless", 39 | "sls", 40 
| "agent", 41 | "analytics", 42 | "metrics", 43 | "telemetry", 44 | "tracing", 45 | "distributed tracing", 46 | "layers" 47 | ], 48 | "devDependencies": { 49 | "@newrelic/test-utilities": "^8.7.0", 50 | "c8": "^7.12.0", 51 | "eslint": "^8.23.1", 52 | "eslint-plugin-import": "^2.26.0", 53 | "node-fetch": "^2.6.11", 54 | "proxyquire": "^2.1.3", 55 | "serverless": "^3.40.0", 56 | "serverless-offline": "^13.9.0", 57 | "tap": "^16.3.0", 58 | "testdouble": "^3.20.2" 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /nodejs/publish-layers.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -Eeuo pipefail 4 | 5 | BUILD_DIR=nodejs 6 | DIST_DIR=dist 7 | 8 | source ../libBuild.sh 9 | 10 | function usage { 11 | echo "./publish-layers.sh [build-18|build-20|build-22|publish-18|publish-20|publish-22]" 12 | } 13 | 14 | function make_package_json { 15 | cat <fake-package.json 16 | { 17 | "name": "newrelic-esm-lambda-wrapper", 18 | "type": "module" 19 | } 20 | EOM 21 | } 22 | 23 | function build_wrapper { 24 | node_version=$1 25 | arch=$2 26 | echo "Building new relic layer for nodejs${node_version}.x (${arch})" 27 | ZIP=$DIST_DIR/nodejs${node_version}x.${arch}.zip 28 | rm -rf $BUILD_DIR $ZIP 29 | mkdir -p $DIST_DIR 30 | npm install --prefix $BUILD_DIR newrelic@latest 31 | NEWRELIC_AGENT_VERSION=$(npm list newrelic --prefix $BUILD_DIR | grep newrelic@ | awk -F '@' '{print $2}') 32 | touch $DIST_DIR/nr-env 33 | echo "NEWRELIC_AGENT_VERSION=$NEWRELIC_AGENT_VERSION" > $DIST_DIR/nr-env 34 | mkdir -p $BUILD_DIR/node_modules/newrelic-lambda-wrapper 35 | cp index.js $BUILD_DIR/node_modules/newrelic-lambda-wrapper 36 | mkdir -p $BUILD_DIR/node_modules/newrelic-esm-lambda-wrapper 37 | cp esm.mjs $BUILD_DIR/node_modules/newrelic-esm-lambda-wrapper/index.js 38 | make_package_json 39 | cp fake-package.json $BUILD_DIR/node_modules/newrelic-esm-lambda-wrapper/package.json 40 | download_extension $arch 41 | zip -rq $ZIP $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 42 | rm -rf fake-package.json $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 43 | echo "Build complete: ${ZIP}" 44 | } 45 | 46 | function publish_wrapper { 47 | node_version=$1 48 | arch=$2 49 | ZIP=$DIST_DIR/nodejs${node_version}x.${arch}.zip 50 | source $DIST_DIR/nr-env 51 | if [ ! 
-f $ZIP ]; then 52 | echo "Package not found: ${ZIP}" 53 | exit 1 54 | fi 55 | 56 | for region in "${REGIONS_ARM[@]}"; do 57 | publish_layer $ZIP $region nodejs${node_version}.x ${arch} $NEWRELIC_AGENT_VERSION 58 | done 59 | } 60 | 61 | case "$1" in 62 | "build_wrapper") 63 | build_wrapper $2 $3 64 | ;; 65 | "publish_wrapper") 66 | publish_wrapper $2 $3 67 | ;; 68 | "build-18") 69 | build_wrapper 18 arm64 70 | build_wrapper 18 x86_64 71 | ;; 72 | "publish-18") 73 | publish_wrapper 18 arm64 74 | publish_wrapper 18 x86_64 75 | ;; 76 | "build-20") 77 | build_wrapper 20 arm64 78 | build_wrapper 20 x86_64 79 | ;; 80 | "publish-20") 81 | publish_wrapper 20 arm64 82 | publish_wrapper 20 x86_64 83 | ;; 84 | "build-22") 85 | build_wrapper 22 arm64 86 | build_wrapper 22 x86_64 87 | ;; 88 | "publish-22") 89 | publish_wrapper 22 arm64 90 | publish_wrapper 22 x86_64 91 | ;; 92 | "build-publish-18-ecr-image") 93 | build_wrapper 18 arm64 94 | publish_docker_ecr $DIST_DIR/nodejs18x.arm64.zip nodejs18.x arm64 95 | build_wrapper 18 x86_64 96 | publish_docker_ecr $DIST_DIR/nodejs18x.x86_64.zip nodejs18.x x86_64 97 | ;; 98 | "build-publish-20-ecr-image") 99 | build_wrapper 20 arm64 100 | publish_docker_ecr $DIST_DIR/nodejs20x.arm64.zip nodejs20.x arm64 101 | build_wrapper 20 x86_64 102 | publish_docker_ecr $DIST_DIR/nodejs20x.x86_64.zip nodejs20.x x86_64 103 | ;; 104 | "build-publish-22-ecr-image") 105 | build_wrapper 22 arm64 106 | publish_docker_ecr $DIST_DIR/nodejs22x.arm64.zip nodejs22.x arm64 107 | build_wrapper 22 x86_64 108 | publish_docker_ecr $DIST_DIR/nodejs22x.x86_64.zip nodejs22.x x86_64 109 | ;; 110 | "nodejs18") 111 | $0 build-18 112 | $0 publish-18 113 | ;; 114 | "nodejs20") 115 | $0 build-20 116 | $0 publish-20 117 | ;; 118 | "nodejs22") 119 | $0 build-22 120 | $0 publish-22 121 | ;; 122 | *) 123 | usage 124 | ;; 125 | esac 126 | -------------------------------------------------------------------------------- /nodejs/test/integration/cjs/handler.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { stringify } = JSON 4 | 5 | module.exports.contextDoneHandler = function contextDoneHandler(event, context) { 6 | context.done(null, { 7 | body: stringify('foo'), 8 | statusCode: 200, 9 | }) 10 | } 11 | 12 | module.exports.contextDoneHandlerDeferred = function contextDoneHandlerDeferred(event, context) { 13 | setTimeout( 14 | () => 15 | context.done(null, { 16 | body: stringify('foo'), 17 | statusCode: 200, 18 | }), 19 | 100, 20 | ) 21 | } 22 | 23 | module.exports.contextSucceedHandler = function contextSucceedHandler(event, context) { 24 | context.succeed({ 25 | body: stringify('foo'), 26 | statusCode: 200, 27 | }) 28 | } 29 | 30 | module.exports.contextSucceedHandlerDeferred = function contextSucceedHandlerDeferred(event, context) { 31 | setTimeout( 32 | () => 33 | context.succeed({ 34 | body: stringify('foo'), 35 | statusCode: 200, 36 | }), 37 | 100, 38 | ) 39 | } 40 | 41 | module.exports.callbackHandler = function callbackHandler(event, context, callback) { 42 | callback(null, { 43 | body: stringify('foo'), 44 | statusCode: 200, 45 | }) 46 | } 47 | 48 | module.exports.callbackHandlerDeferred = function callbackHandlerDeferred(event, context, callback) { 49 | setTimeout( 50 | () => 51 | callback(null, { 52 | body: stringify('foo'), 53 | statusCode: 200, 54 | }), 55 | 100, 56 | ) 57 | } 58 | 59 | module.exports.promiseHandler = function promiseHandler() { 60 | return Promise.resolve({ 61 | body: stringify('foo'), 62 | statusCode: 200, 
63 | }) 64 | } 65 | 66 | module.exports.promiseHandlerDeferred = function promiseHandlerDeferred() { 67 | return new Promise((resolve) => { 68 | setTimeout( 69 | () => 70 | resolve({ 71 | body: stringify('foo'), 72 | statusCode: 200, 73 | }), 74 | 100, 75 | ) 76 | }) 77 | } 78 | 79 | module.exports.asyncFunctionHandler = async function asyncFunctionHandler() { 80 | return { 81 | body: stringify('foo'), 82 | statusCode: 200, 83 | } 84 | } 85 | // we deliberately test the case where a 'callback' is defined 86 | // in the handler, but a promise is being returned to protect from a 87 | // potential naive implementation, e.g. 88 | // 89 | // const { promisify } = 'utils' 90 | // const promisifiedHandler = handler.length === 3 ? promisify(handler) : handler 91 | // 92 | // if someone would return a promise, but also defines callback, without using it 93 | // the handler would not be returning anything 94 | module.exports.promiseWithDefinedCallbackHandler = function promiseWithDefinedCallbackHandler( 95 | event, // eslint-disable-line no-unused-vars 96 | context, // eslint-disable-line no-unused-vars 97 | callback, // eslint-disable-line no-unused-vars 98 | ) { 99 | return Promise.resolve({ 100 | body: stringify('Hello Promise!'), 101 | statusCode: 200, 102 | }) 103 | } 104 | 105 | module.exports.contextSucceedWithContextDoneHandler = function contextSucceedWithContextDoneHandler(event, context) { 106 | context.succeed({ 107 | body: stringify('Hello Context.succeed!'), 108 | statusCode: 200, 109 | }) 110 | context.done(null, { 111 | body: stringify('Hello Context.done!'), 112 | statusCode: 200, 113 | }) 114 | } 115 | 116 | module.exports.callbackWithContextDoneHandler = function callbackWithContextDoneHandler(event, context, callback) { 117 | callback(null, { 118 | body: stringify('Hello Callback!'), 119 | statusCode: 200, 120 | }) 121 | context.done(null, { 122 | body: stringify('Hello Context.done!'), 123 | statusCode: 200, 124 | }) 125 | } 126 | 127 | module.exports.callbackWithPromiseHandler = function callbackWithPromiseHandler(event, context, callback) { 128 | callback(null, { 129 | body: stringify('Hello Callback!'), 130 | statusCode: 200, 131 | }) 132 | return Promise.resolve({ 133 | body: stringify('Hello Promise!'), 134 | statusCode: 200, 135 | }) 136 | } 137 | 138 | module.exports.callbackInsidePromiseHandler = function callbackInsidePromiseHandler(event, context, callback) { 139 | return new Promise((resolve) => { 140 | callback(null, { 141 | body: stringify('Hello Callback!'), 142 | statusCode: 200, 143 | }) 144 | resolve({ 145 | body: stringify('Hello Promise!'), 146 | statusCode: 200, 147 | }) 148 | }) 149 | } 150 | 151 | module.exports.throwExceptionInPromiseHandler = async() => { 152 | throw NaN 153 | } 154 | 155 | module.exports.throwExceptionInCallbackHandler = () => { 156 | throw NaN 157 | } 158 | 159 | module.exports.NoAnswerInPromiseHandler = async() => {} 160 | 161 | module.exports.BadAnswerInPromiseHandler = async() => { 162 | return {} 163 | } 164 | 165 | module.exports.BadAnswerInCallbackHandler = (event, context, callback) => { 166 | callback(null, {}) 167 | } 168 | 169 | module.exports.TestPathVariable = (event, context, callback) => { 170 | callback(null, { 171 | body: stringify(event.path), 172 | statusCode: 200, 173 | }) 174 | } 175 | 176 | module.exports.TestResourceVariable = (event, context, callback) => { 177 | callback(null, { 178 | body: stringify(event.resource), 179 | statusCode: 200, 180 | }) 181 | } 182 | 183 | module.exports.TestPayloadSchemaValidation = 
(event, context, callback) => { 184 | callback(null, { 185 | body: stringify(event.body), 186 | statusCode: 200, 187 | }) 188 | } 189 | -------------------------------------------------------------------------------- /nodejs/test/integration/cjs/index.tap.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const fetch = require('node-fetch') 5 | const { startSlsOffline, stopSlsOffline } = require('../helpers.cjs') 6 | const BASE_URL = 'http://localhost:3000' 7 | 8 | tap.test('CJS Handler Integration Tests', (t) => { 9 | t.autoend() 10 | 11 | t.before(async() => { 12 | await startSlsOffline() 13 | }) 14 | 15 | t.teardown(() => { 16 | stopSlsOffline() 17 | }) 18 | 19 | ;[ 20 | { 21 | description: 'when handler uses context.done', 22 | expected: 'foo', 23 | path: '/dev/context-done-handler', 24 | status: 200, 25 | }, 26 | { 27 | description: 'when handler uses context.done which is deferred', 28 | expected: 'foo', 29 | path: '/dev/context-done-handler-deferred', 30 | status: 200, 31 | }, 32 | { 33 | description: 'when handler uses context.succeed', 34 | expected: 'foo', 35 | path: '/dev/context-succeed-handler', 36 | status: 200, 37 | }, 38 | { 39 | description: 'when handler uses context.succeed which is deferred', 40 | expected: 'foo', 41 | path: '/dev/context-succeed-handler-deferred', 42 | status: 200, 43 | }, 44 | { 45 | description: 'when handler uses a callback', 46 | expected: 'foo', 47 | path: '/dev/callback-handler', 48 | status: 200, 49 | }, 50 | { 51 | description: 'when handler uses a callback which is deferred', 52 | expected: 'foo', 53 | path: '/dev/callback-handler-deferred', 54 | status: 200, 55 | }, 56 | { 57 | description: 'when handler returns a promise', 58 | expected: 'foo', 59 | path: '/dev/promise-handler', 60 | status: 200, 61 | }, 62 | { 63 | description: 'when handler uses a promise which is deferred', 64 | expected: 'foo', 65 | path: '/dev/promise-handler-deferred', 66 | status: 200, 67 | }, 68 | { 69 | description: 'when handler uses an async function', 70 | expected: 'foo', 71 | path: '/dev/async-function-handler', 72 | status: 200, 73 | }, 74 | // NOTE: mix and matching of callbacks and promises is not recommended, 75 | // nonetheless, we test some of the behaviour to match AWS execution precedence 76 | { 77 | description: 78 | 'when handler returns a promise but defines a callback parameter', 79 | expected: 'Hello Promise!', 80 | path: '/dev/promise-with-defined-callback-handler', 81 | status: 200, 82 | }, 83 | { 84 | description: 85 | 'when handler throws an exception in promise should return 502', 86 | path: '/dev/throw-exception-in-promise-handler', 87 | status: 502, 88 | }, 89 | { 90 | description: 91 | 'when handler throws an exception before calling callback should return 502', 92 | path: '/dev/throw-exception-in-callback-handler', 93 | status: 502, 94 | }, 95 | { 96 | description: 97 | 'when handler does not return any answer in promise should return 502', 98 | path: '/dev/no-answer-in-promise-handler', 99 | status: 502, 100 | }, 101 | { 102 | description: 103 | 'when handler returns bad answer in promise should return 200', 104 | path: '/dev/bad-answer-in-promise-handler', 105 | status: 200, 106 | }, 107 | { 108 | description: 109 | 'when handler returns bad answer in callback should return 200', 110 | path: '/dev/bad-answer-in-callback-handler', 111 | status: 200, 112 | }, 113 | { 114 | description: 'when handler calls context.succeed and context.done', 115 | 
expected: 'Hello Context.succeed!', 116 | path: '/dev/context-succeed-with-context-done-handler', 117 | status: 200, 118 | }, 119 | { 120 | description: 'when handler calls callback and context.done', 121 | expected: 'Hello Callback!', 122 | path: '/dev/callback-with-context-done-handler', 123 | status: 200, 124 | }, 125 | { 126 | description: 'when handler calls callback and returns Promise', 127 | expected: 'Hello Callback!', 128 | path: '/dev/callback-with-promise-handler', 129 | status: 200, 130 | }, 131 | { 132 | description: 'when handler calls callback inside returned Promise', 133 | expected: 'Hello Callback!', 134 | path: '/dev/callback-inside-promise-handler', 135 | status: 200, 136 | }, 137 | ].forEach(({ description, expected, path, status }) => { 138 | t.test(description, async(t) => { 139 | const url = new URL(path, BASE_URL) 140 | 141 | const response = await fetch(url) 142 | t.equal(response.status, status, 'should have the expected status code') 143 | 144 | if (expected) { 145 | const json = await response.json() 146 | t.same(json, expected, 'should have the expected response') 147 | } 148 | 149 | t.end() 150 | }) 151 | }) 152 | }) 153 | -------------------------------------------------------------------------------- /nodejs/test/integration/cjs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cjs", 3 | "type": "commonjs" 4 | } 5 | -------------------------------------------------------------------------------- /nodejs/test/integration/esm/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "parserOptions": { 3 | "sourceType": "module" 4 | } 5 | } -------------------------------------------------------------------------------- /nodejs/test/integration/esm/handler.js: -------------------------------------------------------------------------------- 1 | const { stringify } = JSON 2 | 3 | export function contextDoneHandler(event, context) { 4 | context.done(null, { 5 | body: stringify('foo'), 6 | statusCode: 200, 7 | }) 8 | } 9 | 10 | export function contextDoneHandlerDeferred(event, context) { 11 | setTimeout( 12 | () => 13 | context.done(null, { 14 | body: stringify('foo'), 15 | statusCode: 200, 16 | }), 17 | 100, 18 | ) 19 | } 20 | 21 | export function contextSucceedHandler(event, context) { 22 | context.succeed({ 23 | body: stringify('foo'), 24 | statusCode: 200, 25 | }) 26 | } 27 | 28 | export function contextSucceedHandlerDeferred(event, context) { 29 | setTimeout( 30 | () => 31 | context.succeed({ 32 | body: stringify('foo'), 33 | statusCode: 200, 34 | }), 35 | 100, 36 | ) 37 | } 38 | 39 | export function callbackHandler(event, context, callback) { 40 | callback(null, { 41 | body: stringify('foo'), 42 | statusCode: 200, 43 | }) 44 | } 45 | 46 | export function callbackHandlerDeferred(event, context, callback) { 47 | setTimeout( 48 | () => 49 | callback(null, { 50 | body: stringify('foo'), 51 | statusCode: 200, 52 | }), 53 | 100, 54 | ) 55 | } 56 | 57 | export function promiseHandler() { 58 | return Promise.resolve({ 59 | body: stringify('foo'), 60 | statusCode: 200, 61 | }) 62 | } 63 | 64 | export function promiseHandlerDeferred() { 65 | return new Promise((resolve) => { 66 | setTimeout( 67 | () => 68 | resolve({ 69 | body: stringify('foo'), 70 | statusCode: 200, 71 | }), 72 | 100, 73 | ) 74 | }) 75 | } 76 | 77 | export async function asyncFunctionHandler() { 78 | return { 79 | body: stringify('foo'), 80 | statusCode: 200, 81 | } 82 | } 83 | 84 | // we 
deliberately test the case where a 'callback' is defined 85 | // in the handler, but a promise is being returned to protect from a 86 | // potential naive implementation, e.g. 87 | // 88 | // const { promisify } = 'utils' 89 | // const promisifiedHandler = handler.length === 3 ? promisify(handler) : handler 90 | // 91 | // if someone would return a promise, but also defines callback, without using it 92 | // the handler would not be returning anything 93 | export function promiseWithDefinedCallbackHandler( 94 | event, // eslint-disable-line no-unused-vars 95 | context, // eslint-disable-line no-unused-vars 96 | callback, // eslint-disable-line no-unused-vars 97 | ) { 98 | return Promise.resolve({ 99 | body: stringify('Hello Promise!'), 100 | statusCode: 200, 101 | }) 102 | } 103 | 104 | export function contextSucceedWithContextDoneHandler(event, context) { 105 | context.succeed({ 106 | body: stringify('Hello Context.succeed!'), 107 | statusCode: 200, 108 | }) 109 | 110 | context.done(null, { 111 | body: stringify('Hello Context.done!'), 112 | statusCode: 200, 113 | }) 114 | } 115 | 116 | export function callbackWithContextDoneHandler(event, context, callback) { 117 | callback(null, { 118 | body: stringify('Hello Callback!'), 119 | statusCode: 200, 120 | }) 121 | 122 | context.done(null, { 123 | body: stringify('Hello Context.done!'), 124 | statusCode: 200, 125 | }) 126 | } 127 | 128 | export function callbackWithPromiseHandler(event, context, callback) { 129 | callback(null, { 130 | body: stringify('Hello Callback!'), 131 | statusCode: 200, 132 | }) 133 | 134 | return Promise.resolve({ 135 | body: stringify('Hello Promise!'), 136 | statusCode: 200, 137 | }) 138 | } 139 | 140 | export function callbackInsidePromiseHandler(event, context, callback) { 141 | return new Promise((resolve) => { 142 | callback(null, { 143 | body: stringify('Hello Callback!'), 144 | statusCode: 200, 145 | }) 146 | 147 | resolve({ 148 | body: stringify('Hello Promise!'), 149 | statusCode: 200, 150 | }) 151 | }) 152 | } 153 | 154 | export const throwExceptionInPromiseHandler = async() => { 155 | throw NaN 156 | } 157 | 158 | export const throwExceptionInCallbackHandler = () => { 159 | throw NaN 160 | } 161 | 162 | export const NoAnswerInPromiseHandler = async() => {} 163 | 164 | export const BadAnswerInPromiseHandler = async() => { 165 | return {} 166 | } 167 | 168 | export const BadAnswerInCallbackHandler = (event, context, callback) => { 169 | callback(null, {}) 170 | } 171 | -------------------------------------------------------------------------------- /nodejs/test/integration/esm/index.tap.js: -------------------------------------------------------------------------------- 1 | import tap from 'tap' 2 | import fetch from 'node-fetch' 3 | import { startSlsOffline, stopSlsOffline } from '../helpers.cjs' 4 | const BASE_URL = 'http://localhost:3000' 5 | 6 | tap.test('ESM Handler Integration Tests', (t) => { 7 | t.autoend() 8 | 9 | t.before(async() => { 10 | await startSlsOffline() 11 | }) 12 | 13 | t.teardown(() => { 14 | stopSlsOffline() 15 | }) 16 | 17 | ;[ 18 | { 19 | description: 'when handler uses context.done', 20 | expected: 'foo', 21 | path: '/dev/context-done-handler', 22 | status: 200, 23 | }, 24 | // Not supported in ESM due to await import() 25 | // { 26 | // description: 'when handler uses context.done which is deferred', 27 | // expected: 'foo', 28 | // path: '/dev/context-done-handler-deferred', 29 | // status: 200, 30 | // }, 31 | { 32 | description: 'when handler uses context.succeed', 33 | 
expected: 'foo', 34 | path: '/dev/context-succeed-handler', 35 | status: 200, 36 | }, 37 | // Not supported in ESM due to await import() 38 | // { 39 | // description: 'when handler uses context.succeed which is deferred', 40 | // expected: 'foo', 41 | // path: '/dev/context-succeed-handler-deferred', 42 | // status: 200, 43 | // }, 44 | { 45 | description: 'when handler uses a callback', 46 | expected: 'foo', 47 | path: '/dev/callback-handler', 48 | status: 200, 49 | }, 50 | // Not supported in ESM due to await import() 51 | // { 52 | // description: 'when handler uses a callback which is deferred', 53 | // expected: 'foo', 54 | // path: '/dev/callback-handler-deferred', 55 | // status: 200, 56 | // }, 57 | { 58 | description: 'when handler returns a promise', 59 | expected: 'foo', 60 | path: '/dev/promise-handler', 61 | status: 200, 62 | }, 63 | { 64 | description: 'when handler uses a promise which is deferred', 65 | expected: 'foo', 66 | path: '/dev/promise-handler-deferred', 67 | status: 200, 68 | }, 69 | { 70 | description: 'when handler uses an async function', 71 | expected: 'foo', 72 | path: '/dev/async-function-handler', 73 | status: 200, 74 | }, 75 | // NOTE: mix and matching of callbacks and promises is not recommended, 76 | // nonetheless, we test some of the behaviour to match AWS execution precedence 77 | { 78 | description: 79 | 'when handler returns a promise but defines a callback parameter', 80 | expected: 'Hello Promise!', 81 | path: '/dev/promise-with-defined-callback-handler', 82 | status: 200, 83 | }, 84 | { 85 | description: 86 | 'when handler throws an exception in promise should return 502', 87 | path: '/dev/throw-exception-in-promise-handler', 88 | status: 502, 89 | }, 90 | { 91 | description: 92 | 'when handler throws an exception before calling callback should return 502', 93 | path: '/dev/throw-exception-in-callback-handler', 94 | status: 502, 95 | }, 96 | { 97 | description: 98 | 'when handler does not return any answer in promise should return 502', 99 | path: '/dev/no-answer-in-promise-handler', 100 | status: 502, 101 | }, 102 | { 103 | description: 104 | 'when handler returns bad answer in promise should return 200', 105 | path: '/dev/bad-answer-in-promise-handler', 106 | status: 200, 107 | }, 108 | { 109 | description: 110 | 'when handler returns bad answer in callback should return 200', 111 | path: '/dev/bad-answer-in-callback-handler', 112 | status: 200, 113 | }, 114 | { 115 | description: 'when handler calls context.succeed and context.done', 116 | expected: 'Hello Context.succeed!', 117 | path: '/dev/context-succeed-with-context-done-handler', 118 | status: 200, 119 | }, 120 | { 121 | description: 'when handler calls callback and context.done', 122 | expected: 'Hello Callback!', 123 | path: '/dev/callback-with-context-done-handler', 124 | status: 200, 125 | }, 126 | { 127 | description: 'when handler calls callback and returns Promise', 128 | expected: 'Hello Callback!', 129 | path: '/dev/callback-with-promise-handler', 130 | status: 200, 131 | }, 132 | { 133 | description: 'when handler calls callback inside returned Promise', 134 | expected: 'Hello Callback!', 135 | path: '/dev/callback-inside-promise-handler', 136 | status: 200, 137 | }, 138 | ].forEach(({ description, expected, path, status }) => { 139 | t.test(description, async(t) => { 140 | const url = new URL(path, BASE_URL) 141 | 142 | const response = await fetch(url) 143 | t.equal(response.status, status, 'should have the expected status code') 144 | 145 | if (expected) { 146 | const json 
= await response.json() 147 | t.same(json, expected, 'should have the expected response') 148 | } 149 | 150 | t.end() 151 | }) 152 | }) 153 | }) 154 | -------------------------------------------------------------------------------- /nodejs/test/integration/esm/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "esm", 3 | "type": "module" 4 | } 5 | -------------------------------------------------------------------------------- /nodejs/test/integration/helpers.cjs: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { spawn } = require('node:child_process') 4 | 5 | let slsOfflineProcess 6 | 7 | exports.startSlsOffline = async function startSlsOffline() { 8 | return new Promise((resolve) => { 9 | slsOfflineProcess = spawn("serverless", ["offline", "start"], {cwd: __dirname}) 10 | 11 | slsOfflineProcess.stderr.on('data', (err) => { 12 | if (err.toString().includes('Server ready:')) { 13 | resolve(slsOfflineProcess) 14 | } 15 | }) 16 | }) 17 | } 18 | 19 | exports.stopSlsOffline = function stopSlsOffline() { 20 | slsOfflineProcess.kill() 21 | } 22 | -------------------------------------------------------------------------------- /nodejs/test/integration/serverless.yml: -------------------------------------------------------------------------------- 1 | service: integration-tests 2 | frameworkVersion: '3' 3 | 4 | custom: 5 | NODE_RUNTIME: ${env:NODE_RUNTIME, 'nodejs18.x'} 6 | 7 | plugins: 8 | - serverless-offline 9 | 10 | provider: 11 | name: aws 12 | runtime: ${self:custom.NODE_RUNTIME} 13 | 14 | functions: 15 | contextDoneHandler: 16 | events: 17 | - http: 18 | method: get 19 | path: context-done-handler 20 | handler: ${env:HANDLER} 21 | environment: 22 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 23 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.contextDoneHandler 24 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 25 | LAMBDA_TASK_ROOT: ./ 26 | contextDoneHandlerDeferred: 27 | events: 28 | - http: 29 | method: get 30 | path: context-done-handler-deferred 31 | handler: ${env:HANDLER} 32 | environment: 33 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 34 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.contextDoneHandlerDeferred 35 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 36 | LAMBDA_TASK_ROOT: ./ 37 | contextSucceedHandler: 38 | events: 39 | - http: 40 | method: get 41 | path: context-succeed-handler 42 | handler: ${env:HANDLER} 43 | environment: 44 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 45 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.contextSucceedHandler 46 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 47 | LAMBDA_TASK_ROOT: ./ 48 | contextSucceedHandlerDeferred: 49 | events: 50 | - http: 51 | method: get 52 | path: context-succeed-handler-deferred 53 | handler: ${env:HANDLER} 54 | environment: 55 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 56 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.contextSucceedHandlerDeferred 57 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 58 | LAMBDA_TASK_ROOT: ./ 59 | callbackHandler: 60 | events: 61 | - http: 62 | method: get 63 | path: callback-handler 64 | handler: ${env:HANDLER} 65 | environment: 66 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 67 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.callbackHandler 68 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 69 | LAMBDA_TASK_ROOT: ./ 70 | callbackHandlerDeferred: 71 | events: 72 | - http: 73 | method: get 74 | path: 
callback-handler-deferred 75 | handler: ${env:HANDLER} 76 | environment: 77 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 78 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.callbackHandlerDeferred 79 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 80 | LAMBDA_TASK_ROOT: ./ 81 | promiseHandler: 82 | events: 83 | - http: 84 | method: get 85 | path: promise-handler 86 | handler: ${env:HANDLER} 87 | environment: 88 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 89 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.promiseHandler 90 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 91 | LAMBDA_TASK_ROOT: ./ 92 | promiseHandlerDeferred: 93 | events: 94 | - http: 95 | method: get 96 | path: promise-handler-deferred 97 | handler: ${env:HANDLER} 98 | environment: 99 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 100 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.promiseHandlerDeferred 101 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 102 | LAMBDA_TASK_ROOT: ./ 103 | asyncFunctionHandler: 104 | events: 105 | - http: 106 | method: get 107 | path: async-function-handler 108 | handler: ${env:HANDLER} 109 | environment: 110 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 111 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.asyncFunctionHandler 112 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 113 | LAMBDA_TASK_ROOT: ./ 114 | promiseWithDefinedCallbackHandler: 115 | events: 116 | - http: 117 | method: get 118 | path: promise-with-defined-callback-handler 119 | handler: ${env:HANDLER} 120 | environment: 121 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 122 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.promiseWithDefinedCallbackHandler 123 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 124 | LAMBDA_TASK_ROOT: ./ 125 | contextSucceedWithContextDoneHandler: 126 | events: 127 | - http: 128 | method: get 129 | path: context-succeed-with-context-done-handler 130 | handler: ${env:HANDLER} 131 | environment: 132 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 133 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.contextSucceedWithContextDoneHandler 134 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 135 | LAMBDA_TASK_ROOT: ./ 136 | callbackWithContextDoneHandler: 137 | events: 138 | - http: 139 | method: get 140 | path: callback-with-context-done-handler 141 | handler: ${env:HANDLER} 142 | environment: 143 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 144 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.callbackWithContextDoneHandler 145 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 146 | LAMBDA_TASK_ROOT: ./ 147 | callbackWithPromiseHandler: 148 | events: 149 | - http: 150 | method: get 151 | path: callback-with-promise-handler 152 | handler: ${env:HANDLER} 153 | environment: 154 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 155 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.callbackWithPromiseHandler 156 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 157 | LAMBDA_TASK_ROOT: ./ 158 | callbackInsidePromiseHandler: 159 | events: 160 | - http: 161 | method: get 162 | path: callback-inside-promise-handler 163 | handler: ${env:HANDLER} 164 | environment: 165 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 166 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.callbackInsidePromiseHandler 167 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 168 | LAMBDA_TASK_ROOT: ./ 169 | throwExceptionInPromiseHandler: 170 | events: 171 | - http: 172 | method: get 173 | path: throw-exception-in-promise-handler 174 | handler: ${env:HANDLER} 175 | environment: 176 | NEW_RELIC_USE_ESM: 
${env:NEW_RELIC_USE_ESM} 177 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.throwExceptionInPromiseHandler 178 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 179 | LAMBDA_TASK_ROOT: ./ 180 | throwExceptionInCallbackHandler: 181 | events: 182 | - http: 183 | method: get 184 | path: throw-exception-in-callback-handler 185 | handler: ${env:HANDLER} 186 | environment: 187 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 188 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.throwExceptionInCallbackHandler 189 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 190 | LAMBDA_TASK_ROOT: ./ 191 | NoAnswerInPromiseHandler: 192 | events: 193 | - http: 194 | method: get 195 | path: no-answer-in-promise-handler 196 | handler: ${env:HANDLER} 197 | environment: 198 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 199 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.NoAnswerInPromiseHandler 200 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 201 | LAMBDA_TASK_ROOT: ./ 202 | BadAnswerInPromiseHandler: 203 | events: 204 | - http: 205 | method: get 206 | path: bad-answer-in-promise-handler 207 | handler: ${env:HANDLER} 208 | environment: 209 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 210 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.BadAnswerInPromiseHandler 211 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 212 | LAMBDA_TASK_ROOT: ./ 213 | BadAnswerInCallbackHandler: 214 | events: 215 | - http: 216 | method: get 217 | path: bad-answer-in-callback-handler 218 | handler: ${env:HANDLER} 219 | environment: 220 | NEW_RELIC_USE_ESM: ${env:NEW_RELIC_USE_ESM} 221 | NEW_RELIC_LAMBDA_HANDLER: ./${env:MODULE_TYPE}/handler.BadAnswerInCallbackHandler 222 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 223 | LAMBDA_TASK_ROOT: ./ 224 | -------------------------------------------------------------------------------- /nodejs/test/unit/cjsErrorStates.tap.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const proxyquire = require('proxyquire').noCallThru().noPreserveCache() 5 | const utils = require('@newrelic/test-utilities') 6 | const path = require('node:path') 7 | 8 | const handlerPath = 'test/unit/fixtures/cjs/' 9 | const handlerAndPath = [ 10 | { 11 | handlerFile: 'handler', 12 | handlerMethod: 'handler' 13 | }, 14 | { 15 | handlerFile: undefined, 16 | handlerMethod: undefined 17 | }, 18 | { 19 | handlerFile: 'handler', 20 | handlerMethod: undefined 21 | }, 22 | { 23 | handlerFile: 'notFound', 24 | handlerMethod: 'noMethodFound' 25 | }, 26 | { 27 | handlerFile: 'errors', 28 | handlerMethod: 'noMethodFound' 29 | }, 30 | { 31 | handlerFile: 'errors', 32 | handlerMethod: 'notAfunction' 33 | }, 34 | { 35 | handlerFile: 'badImport', 36 | method: 'handler' 37 | }, 38 | ] 39 | 40 | tap.test('CJS Edge Cases', (t) => { 41 | t.autoend() 42 | let testIndex = 0 43 | t.beforeEach((t) => { 44 | t.context.originalEnv = { ...process.env } 45 | process.env.NEW_RELIC_USE_ESM = 'false' 46 | process.env.LAMBDA_TASK_ROOT = './' 47 | process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED = 'true' // only need to check this once. 
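// Added commentary (not part of the original source): every beforeEach consumes the
// next handlerAndPath entry via testIndex, so the subtests below must stay in the
// same order as that fixture array. proxyquire's noPreserveCache() keeps
// '../../index' from being cached between subtests, which is what allows each one to
// re-evaluate the wrapper against the freshly mutated environment variables.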
48 | 49 | const { handlerFile, handlerMethod } = handlerAndPath[testIndex] 50 | if (handlerFile && handlerMethod) { 51 | process.env.NEW_RELIC_LAMBDA_HANDLER = `${handlerPath}${handlerFile}.${handlerMethod}` 52 | } else if (handlerFile) { 53 | process.env.NEW_RELIC_LAMBDA_HANDLER = `${handlerPath}${handlerFile}` 54 | } 55 | testIndex++ 56 | 57 | const helper = utils.TestAgent.makeInstrumented() 58 | 59 | // Some loading-related errors happen early; to test these, we have to wrap 60 | // in the test assertion, so we can compare the surfaced error to what we expect. 61 | t.context.testFn = () => { 62 | const newrelic = helper.getAgentApi() 63 | 64 | const { handler } = proxyquire('../../index', { 65 | 'newrelic': newrelic 66 | }) 67 | t.context.handler = handler 68 | return handler({ key: 'this is a test'}, { functionName: handlerMethod }) 69 | } 70 | t.context.handlerFile = handlerFile 71 | t.context.handlerMethod = handlerMethod 72 | t.context.helper = helper 73 | }) 74 | 75 | t.afterEach((t) => { 76 | const { originalEnv, helper } = t.context 77 | process.env = { ...originalEnv } 78 | helper.unload() 79 | }) 80 | 81 | t.test('should delete serverless mode env var if defined', (t) => { 82 | const { helper } = t.context 83 | const newrelic = helper.getAgentApi() 84 | 85 | proxyquire('../../index', { 86 | 'newrelic': newrelic 87 | }) 88 | 89 | t.notOk(process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED, 90 | 'NEW_RELIC_SERVERLESS_MODE_ENABLED env var should have been deleted') 91 | t.end() 92 | }) 93 | 94 | t.test('should throw when NEW_RELIC_LAMBDA_HANDLER is missing', (t) => { 95 | const { testFn } = t.context 96 | t.throws( 97 | () => testFn(), 98 | 'No NEW_RELIC_LAMBDA_HANDLER environment variable set.', 99 | ) 100 | t.end() 101 | }) 102 | 103 | t.test('should throw when NEW_RELIC_LAMBDA_HANDLER is malformed', (t) => { 104 | const { testFn } = t.context 105 | t.throws( 106 | () => testFn(), 107 | 'Improperly formatted handler environment variable: test/unit/fixtures/cjs/handler', 108 | ) 109 | t.end() 110 | }) 111 | 112 | t.test('should throw when NEW_RELIC_LAMBDA_HANDLER module cannot be resolved', (t) => { 113 | const { testFn } = t.context 114 | const modulePath = path.resolve('./', handlerPath) 115 | const extensions = ['.cjs', '.js'] 116 | t.throws( 117 | () => testFn(), 118 | `Unable to resolve module file at ${modulePath} with the following extensions: ${extensions.join(',')}` 119 | ) 120 | 121 | t.end() 122 | }) 123 | 124 | t.test('should throw when NEW_RELIC_LAMBDA_HANDLER does not export provided function', (t) => { 125 | const { handlerMethod, testFn } = t.context 126 | t.throws( 127 | () => testFn(), 128 | `Handler '${handlerMethod}' missing on module '${handlerPath}'`, 129 | ) 130 | 131 | t.end() 132 | }) 133 | 134 | t.test('should throw when NEW_RELIC_LAMBDA_HANDLER export is not a function', (t) => { 135 | const { handlerMethod, testFn } = t.context 136 | t.throws( 137 | () => testFn(), 138 | `Handler '${handlerMethod}' from 'test/unit/fixtures/cjs/errors' is not a function`, 139 | ) 140 | 141 | t.end() 142 | }) 143 | 144 | t.test('should throw when NEW_RELIC_LAMBDA_HANDLER throws on import', (t) => { 145 | const { handlerFile, testFn } = t.context 146 | t.throws( 147 | () => testFn(), 148 | `Unable to import module '${handlerPath}${handlerFile}'`, 149 | ) 150 | t.end() 151 | }) 152 | t.end() 153 | }) 154 | -------------------------------------------------------------------------------- /nodejs/test/unit/cjsHandler.tap.js: 
-------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const proxyquire = require('proxyquire').noCallThru().noPreserveCache() 5 | const utils = require('@newrelic/test-utilities') 6 | 7 | tap.test('Layer Handler - CJS Function', (t) => { 8 | t.autoend() 9 | 10 | t.beforeEach((t) => { 11 | t.context.originalEnv = { ...process.env } 12 | process.env.NEW_RELIC_USE_ESM = 'false' 13 | process.env.NEW_RELIC_LAMBDA_HANDLER = 'test/unit/fixtures/cjs/handler.handler' 14 | process.env.AWS_LAMBDA_FUNCTION_NAME = 'testFn' 15 | 16 | const helper = utils.TestAgent.makeInstrumented() 17 | 18 | const newrelic = helper.getAgentApi() 19 | 20 | const { handler } = proxyquire('../../index', { 21 | 'newrelic': newrelic 22 | }) 23 | t.context.helper = helper 24 | t.context.handler = handler 25 | }) 26 | 27 | t.afterEach((t) => { 28 | const { helper, originalEnv } = t.context 29 | process.env = { ...originalEnv } 30 | helper.unload() 31 | }) 32 | 33 | t.test('should wrap handler in transaction', async(t) => { 34 | const { handler, helper } = t.context 35 | const promise = new Promise((resolve) => { 36 | helper.agent.on('transactionFinished', (transaction) => { 37 | t.equal(transaction.name, 'OtherTransaction/Function/testFn', 'transaction should be properly named') 38 | resolve() 39 | }) 40 | }) 41 | 42 | t.equal(typeof handler, 'function', 'handler should be a function') 43 | // TODO: Once we release agent this will work 44 | // t.equal(handler[Symbol.for('test.symbol')], 'value', 'should have symbol on wrapped handler') 45 | const res = await handler({ key: 'this is a test'}, { functionName: 'testFn'}) 46 | t.same(res, { statusCode: 200, body: 'response body this is a test' }, 'response should be correct') 47 | await promise 48 | }) 49 | }) 50 | -------------------------------------------------------------------------------- /nodejs/test/unit/esmErrorStates.tap.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const utils = require('@newrelic/test-utilities') 5 | const td = require('testdouble') 6 | 7 | const handlerPath = 'test/unit/fixtures/esm/' 8 | const testCases = [ 9 | { 10 | handlerFile: 'handler', 11 | handlerMethod: undefined 12 | }, 13 | { 14 | handlerFile: undefined, 15 | handlerMethod: undefined 16 | }, 17 | { 18 | handlerFile: 'badImport', 19 | method: 'handler' 20 | }, 21 | { 22 | handlerFile: 'notFound', 23 | handlerMethod: 'noMethodFound' 24 | }, 25 | { 26 | handlerFile: 'errors', 27 | handlerMethod: 'noMethodFound' 28 | }, 29 | { 30 | handlerFile: 'errors', 31 | handlerMethod: 'notAfunction' 32 | }, 33 | ] 34 | 35 | tap.test('Early-throwing ESM Edge Cases', (t) => { 36 | t.autoend() 37 | 38 | t.beforeEach(async(t) => { 39 | t.context.originalEnv = { ...process.env } 40 | process.env.LAMBDA_TASK_ROOT = './' 41 | process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED = 'true' // only need to check this once. 
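// Added commentary (not part of the original source): td.replaceEsm below substitutes
// the instrumented test agent for the 'newrelic' specifier, so when a subtest later
// runs `await import('../../esm.mjs')` the wrapper resolves the stubbed agent instead
// of the real one; the replacement must happen before that dynamic import, which is
// why it sits here in beforeEach.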
42 | 43 | const helper = utils.TestAgent.makeInstrumented() 44 | const newrelic = helper.getAgentApi() 45 | await td.replaceEsm('newrelic', {}, newrelic) 46 | 47 | t.context.helper = helper 48 | }) 49 | 50 | t.afterEach((t) => { 51 | const { helper, originalEnv } = t.context 52 | process.env = { ...originalEnv } 53 | helper.unload() 54 | }) 55 | 56 | for (const test of testCases ) { 57 | const { handlerFile, handlerMethod } = test 58 | let testName = `should reject because 'NEW_RELIC_LAMBDA_HANDLER' is not an expected value for ${handlerPath}` 59 | if (handlerFile) { 60 | testName += handlerFile 61 | } 62 | if (handlerMethod) { 63 | testName += `.${handlerMethod}` 64 | } 65 | 66 | t.test(testName, (t) => { 67 | if (handlerFile && handlerMethod) { 68 | process.env.NEW_RELIC_LAMBDA_HANDLER = `${handlerPath}${handlerFile}.${handlerMethod}` 69 | } else if (handlerFile) { 70 | process.env.NEW_RELIC_LAMBDA_HANDLER = `${handlerPath}${handlerFile}` 71 | } 72 | t.rejects( 73 | async() => { 74 | const { handler } = await import('../../esm.mjs') 75 | return handler({key: 'this is a test'}, {functionName: handlerMethod}) 76 | }, 77 | 'No NEW_RELIC_LAMBDA_HANDLER environment variable set.', 78 | ) 79 | 80 | t.end() 81 | }) 82 | } 83 | }) 84 | -------------------------------------------------------------------------------- /nodejs/test/unit/esmHandler.tap.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const utils = require('@newrelic/test-utilities') 5 | const td = require('testdouble') 6 | 7 | tap.test('Layer Handler - ESM Function', (t) => { 8 | t.autoend() 9 | 10 | t.beforeEach(async(t) => { 11 | const originalEnv = { ...process.env } 12 | process.env.NEW_RELIC_LAMBDA_HANDLER = 'test/unit/fixtures/esm/handler.handler' 13 | process.env.AWS_LAMBDA_FUNCTION_NAME = 'testFn' 14 | 15 | const helper = utils.TestAgent.makeInstrumented() 16 | 17 | const newrelic = helper.getAgentApi() 18 | await td.replaceEsm('newrelic', {}, newrelic) 19 | const { handler } = await import('../../esm.mjs') 20 | t.context.helper = helper 21 | t.context.handler = handler 22 | t.context.originalEnv = originalEnv 23 | }) 24 | 25 | t.afterEach((t) => { 26 | const { helper, originalEnv } = t.context 27 | process.env = { ...originalEnv } 28 | helper.unload() 29 | }) 30 | 31 | t.test('should wrap handler in transaction', async(t) => { 32 | const { helper, handler } = t.context 33 | const promise = new Promise((resolve) => { 34 | helper.agent.on('transactionFinished', (transaction) => { 35 | t.equal(transaction.name, 'OtherTransaction/Function/testFn', 'transaction should be properly named') 36 | resolve() 37 | }) 38 | }) 39 | 40 | t.equal(typeof handler, 'function', 'handler should be a function') 41 | // TODO: Once we release agent this will work 42 | // t.equal(handler[Symbol.for('test.symbol')], 'value', 'should have symbol on wrapped handler') 43 | const res = await handler({ key: 'this is a test'}, { functionName: 'testFn'}) 44 | t.same(res, { statusCode: 200, body: 'response body this is a test' }, 'response should be correct') 45 | await promise 46 | }) 47 | }) 48 | -------------------------------------------------------------------------------- /nodejs/test/unit/fixtures/cjs/badRequire.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | // eslint-disable-next-line no-unused-vars 4 | const notFoundDependency = require('path/not/found') 5 | 6 | exports.handler = (event) => { 7 | 
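// Added commentary (not part of the original source): this fixture exists to exercise
// the failed-require path; the require('path/not/found') at the top of the file throws
// MODULE_NOT_FOUND before this export is ever invoked, and the layer's
// handleRequireImportError() maps that code to an "Unable to import module '...'" error.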
return { 8 | statusCode: 200, 9 | body: JSON.stringify(event) 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /nodejs/test/unit/fixtures/cjs/errors.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | exports.notAfunction = `This is a string.` 4 | -------------------------------------------------------------------------------- /nodejs/test/unit/fixtures/cjs/handler.js: -------------------------------------------------------------------------------- 1 | "use strict" 2 | const test = Symbol.for('test.symbol') 3 | 4 | // eslint-disable-next-line no-unused-vars 5 | const handler = async function handler(event, context) { 6 | return new Promise((resolve) => { 7 | setTimeout(() => { 8 | resolve({ 9 | "statusCode": 200, 10 | "body": `response body ${event.key}` 11 | }) 12 | }, 100) 13 | }) 14 | } 15 | handler[test] = 'value' 16 | 17 | module.exports.handler = handler 18 | -------------------------------------------------------------------------------- /nodejs/test/unit/fixtures/esm/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "parserOptions": { 3 | "sourceType": "module" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /nodejs/test/unit/fixtures/esm/badImport.mjs: -------------------------------------------------------------------------------- 1 | // eslint-disable-next-line no-unused-vars 2 | import notFoundDependency from 'path/not/found' 3 | 4 | export function handler(event) { 5 | return { 6 | statusCode: 200, 7 | body: JSON.stringify(event) 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /nodejs/test/unit/fixtures/esm/errors.mjs: -------------------------------------------------------------------------------- 1 | export const notAfunction = `This is a string.` 2 | -------------------------------------------------------------------------------- /nodejs/test/unit/fixtures/esm/handler.mjs: -------------------------------------------------------------------------------- 1 | const test = Symbol.for('test.symbol') 2 | // eslint-disable-next-line no-unused-vars 3 | const handler = async function handler(event, context) { 4 | return new Promise((resolve) => { 5 | setTimeout(() => { 6 | resolve({ 7 | "statusCode": 200, 8 | "body": `response body ${event.key}` 9 | }) 10 | }, 100) 11 | }) 12 | } 13 | 14 | handler[test] = 'value' 15 | 16 | export { handler } 17 | -------------------------------------------------------------------------------- /nodejs/test/unit/legacyEsmErrorStates.tap.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const proxyquire = require('proxyquire').noCallThru().noPreserveCache() 5 | const utils = require('@newrelic/test-utilities') 6 | 7 | const handlerPath = 'test/unit/fixtures/esm/' 8 | const testCases = [ 9 | { 10 | handlerFile: 'handler', 11 | handlerMethod: undefined, 12 | type: 'throws' 13 | }, 14 | { 15 | handlerFile: undefined, 16 | handlerMethod: undefined, 17 | type: 'throws' 18 | }, 19 | { 20 | handlerFile: 'badImport', 21 | method: 'handler', 22 | type: 'throws' 23 | }, 24 | { 25 | handlerFile: 'notFound', 26 | handlerMethod: 'noMethodFound', 27 | type: 'rejects' 28 | }, 29 | { 30 | handlerFile: 'errors', 31 | handlerMethod: 'noMethodFound', 32 | type: 'rejects' 33 | }, 34 | { 35 | handlerFile: 'errors', 36 | 
handlerMethod: 'notAfunction', 37 | type: 'rejects' 38 | }, 39 | ] 40 | 41 | tap.test('Early-throwing ESM Edge Cases', (t) => { 42 | t.autoend() 43 | t.beforeEach((t) => { 44 | t.context.originalEnv = { ...process.env } 45 | process.env.NEW_RELIC_USE_ESM = 'true' 46 | process.env.LAMBDA_TASK_ROOT = './' 47 | process.env.NEW_RELIC_SERVERLESS_MODE_ENABLED = 'true' // only need to check this once. 48 | 49 | t.context.helper = utils.TestAgent.makeInstrumented() 50 | }) 51 | 52 | t.afterEach((t) => { 53 | const { helper, originalEnv} = t.context 54 | process.env = { ...originalEnv } 55 | helper.unload() 56 | }) 57 | 58 | for (const test of testCases ) { 59 | const { handlerFile, handlerMethod, type } = test 60 | let testName = `should ${type} because 'NEW_RELIC_LAMBDA_HANDLER' is not an expected value for ${handlerPath}` 61 | if (handlerFile) { 62 | testName += handlerFile 63 | } 64 | if (handlerMethod) { 65 | testName += `.${handlerMethod}` 66 | } 67 | t.test(testName, (t) => { 68 | const { helper } = t.context 69 | if (handlerFile && handlerMethod) { 70 | process.env.NEW_RELIC_LAMBDA_HANDLER = `${handlerPath}${handlerFile}.${handlerMethod}` 71 | } else if (handlerFile) { 72 | process.env.NEW_RELIC_LAMBDA_HANDLER = `${handlerPath}${handlerFile}` 73 | } 74 | 75 | t[type]( 76 | () => { 77 | const newrelic = helper.getAgentApi() 78 | const { handler } = proxyquire('../../index', { 79 | 'newrelic': newrelic 80 | }) 81 | return handler({key: 'this is a test'}, {functionName: handlerMethod}) 82 | }, 83 | 'No NEW_RELIC_LAMBDA_HANDLER environment variable set.', 84 | ) 85 | 86 | t.end() 87 | }) 88 | } 89 | }) 90 | -------------------------------------------------------------------------------- /nodejs/test/unit/legacyEsmHandler.tap.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const proxyquire = require('proxyquire').noCallThru().noPreserveCache() 5 | const utils = require('@newrelic/test-utilities') 6 | 7 | tap.test('Legacy Layer Handler - ESM Function', (t) => { 8 | t.autoend() 9 | t.beforeEach((t) => { 10 | t.context.originalEnv = { ...process.env } 11 | process.env.NEW_RELIC_USE_ESM = 'true' 12 | process.env.NEW_RELIC_LAMBDA_HANDLER = 'test/unit/fixtures/esm/handler.handler' 13 | process.env.AWS_LAMBDA_FUNCTION_NAME = 'testFn' 14 | 15 | const helper = utils.TestAgent.makeInstrumented() 16 | 17 | const newrelic = helper.getAgentApi() 18 | 19 | const { handler } = proxyquire('../../index', { 20 | 'newrelic': newrelic 21 | }) 22 | t.context.helper = helper 23 | t.context.handler = handler 24 | }) 25 | 26 | t.afterEach((t) => { 27 | const { helper, originalEnv } = t.context 28 | process.env = { ...originalEnv } 29 | helper.unload() 30 | }) 31 | 32 | t.test('should wrap handler in transaction', async(t) => { 33 | const { handler, helper } = t.context 34 | const promise = new Promise((resolve) => { 35 | helper.agent.on('transactionFinished', (transaction) => { 36 | t.equal(transaction.name, 'OtherTransaction/Function/testFn', 'transaction should be properly named') 37 | resolve() 38 | }) 39 | }) 40 | 41 | t.equal(typeof handler, 'function', 'handler should be a function') 42 | const res = await handler({ key: 'this is a test'}, { functionName: 'testFn'}) 43 | t.same(res, { statusCode: 200, body: 'response body this is a test' }, 'response should be correct') 44 | await promise 45 | }) 46 | }) 47 | -------------------------------------------------------------------------------- /python/.gitignore: 
-------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject -------------------------------------------------------------------------------- /python/newrelic_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | import newrelic_lambda.agent_protocol # noqa 2 | -------------------------------------------------------------------------------- /python/newrelic_lambda/agent_protocol.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | from newrelic.common.encoding_utils import ( 5 | json_encode, 6 | serverless_payload_encode, 7 | ) 8 | 9 | try: 10 | from newrelic.core.agent_protocol import ServerlessModeProtocol 11 | except ImportError: 12 | ServerlessModeProtocol = None 13 | 14 | from newrelic.core.data_collector import ServerlessModeSession 15 | 16 | NAMED_PIPE_PATH = "/tmp/newrelic-telemetry" 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | def put_payload_cloudwatch(payload): 21 | try: 22 | cloudwatch_logging = __import__('newrelic_lambda.cloudwatch_logging', fromlist=['put_log_to_cloudwatch']) 23 | cloudwatch_logging.put_log_to_cloudwatch(payload) 24 | except Exception as e: 25 | print(f"Failed to send payload to CloudWatch: {e}, resorting to print payload.") 26 | print(payload) 27 | 28 | if ServerlessModeProtocol is not None: 29 | # New Relic Agent >=5.16 30 | def protocol_finalize(self): 31 | for key in self.configuration.aws_lambda_metadata: 32 | if key not in self._metadata: 33 | self._metadata[key] = self.configuration.aws_lambda_metadata[key] 34 | 35 | data = self.client.finalize() 36 | 37 | payload = { 38 | "metadata": self._metadata, 39 | "data": data, 40 | } 41 | 42 | encoded = serverless_payload_encode(payload) 43 | payload = json_encode((1, "NR_LAMBDA_MONITORING", encoded)) 44 | 45 | if os.path.exists(NAMED_PIPE_PATH): 46 | try: 47 | with open(NAMED_PIPE_PATH, "w") as named_pipe: 48 | named_pipe.write(payload) 49 | except IOError as e: 50 | logger.error( 51 | "Failed to write to named pipe %s: 
%s" % (NAMED_PIPE_PATH, e) 52 | ) 53 | else: 54 | if os.getenv("NEW_RELIC_MAX_PAYLOAD", "false").lower() == "true": 55 | put_payload_cloudwatch(payload) 56 | else: 57 | print(payload) 58 | 59 | return payload 60 | 61 | ServerlessModeProtocol.finalize = protocol_finalize 62 | 63 | else: 64 | # New Relic Agent <5.16 65 | def session_finalize(self): 66 | encoded = serverless_payload_encode(self.payload) 67 | payload = json_encode((1, "NR_LAMBDA_MONITORING", encoded)) 68 | 69 | if os.path.exists(NAMED_PIPE_PATH): 70 | try: 71 | with open(NAMED_PIPE_PATH, "w") as named_pipe: 72 | named_pipe.write(payload) 73 | except IOError as e: 74 | logger.error( 75 | "Failed to write to named pipe %s: %s" % (NAMED_PIPE_PATH, e) 76 | ) 77 | else: 78 | print(payload) 79 | 80 | # Clear data after sending 81 | self._data.clear() 82 | return payload 83 | 84 | ServerlessModeSession.finalize = session_finalize 85 | -------------------------------------------------------------------------------- /python/newrelic_lambda/cloudwatch_logging.py: -------------------------------------------------------------------------------- 1 | import os 2 | import boto3 3 | from time import time 4 | 5 | log_group_name = os.getenv("AWS_LAMBDA_LOG_GROUP_NAME", "") 6 | log_stream_name = os.getenv("AWS_LAMBDA_LOG_STREAM_NAME", "") 7 | log_level = os.getenv("NEW_RELIC_LOG_LEVEL", "info").lower() 8 | 9 | def put_log_to_cloudwatch(payload): 10 | logs_client = boto3.client('logs') 11 | 12 | def ensure_log_stream_exists(log_group_name, log_stream_name): 13 | try: 14 | logs_client.create_log_stream(logGroupName=log_group_name, logStreamName=log_stream_name) 15 | except Exception as e: 16 | if log_level == "debug": 17 | print(f"Attempt create log stream {log_stream_name} in log group {log_group_name}: {e}") 18 | 19 | ensure_log_stream_exists(log_group_name, log_stream_name) 20 | 21 | log_event = { 22 | 'timestamp': int(time() * 1000), 23 | 'message': payload, 24 | } 25 | 26 | logs_client.put_log_events( 27 | logGroupName=log_group_name, 28 | logStreamName=log_stream_name, 29 | logEvents=[log_event] 30 | ) 31 | if log_level == "debug": 32 | print(f"Log event successfully sent to CloudWatch: {len(payload)} bytes, log group: {log_group_name}, log stream: {log_stream_name}") 33 | -------------------------------------------------------------------------------- /python/newrelic_lambda/event-sources.json: -------------------------------------------------------------------------------- 1 | { 2 | "alb": { 3 | "attributes": {}, 4 | "name": "alb", 5 | "required_keys": [ 6 | "httpMethod", 7 | "requestContext.elb" 8 | ] 9 | }, 10 | "apiGateway": { 11 | "attributes": { 12 | "aws.lambda.eventSource.accountId": "requestContext.accountId", 13 | "aws.lambda.eventSource.apiId": "requestContext.apiId", 14 | "aws.lambda.eventSource.resourceId": "requestContext.resourceId", 15 | "aws.lambda.eventSource.resourcePath": "requestContext.resourcePath", 16 | "aws.lambda.eventSource.stage": "requestContext.stage" 17 | }, 18 | "name": "apiGateway", 19 | "required_keys": [ 20 | "headers", 21 | "httpMethod", 22 | "path", 23 | "requestContext", 24 | "requestContext.stage" 25 | ] 26 | }, 27 | "cloudFront": { 28 | "attributes": {}, 29 | "name": "cloudFront", 30 | "required_keys": [ 31 | "Records[0].cf" 32 | ] 33 | }, 34 | "cloudWatchScheduled": { 35 | "attributes": { 36 | "aws.lambda.eventSource.account": "account", 37 | "aws.lambda.eventSource.id": "id", 38 | "aws.lambda.eventSource.region": "region", 39 | "aws.lambda.eventSource.resource": "resources[0]", 40 | 
"aws.lambda.eventSource.time": "time" 41 | }, 42 | "name": "cloudWatch_scheduled", 43 | "required_keys": [ 44 | "detail-type", 45 | "source" 46 | ] 47 | }, 48 | "dynamoStreams": { 49 | "attributes": { 50 | "aws.lambda.eventSource.length": "Records.length" 51 | }, 52 | "name": "dynamo_streams", 53 | "required_keys": [ 54 | "Records[0].dynamodb" 55 | ] 56 | }, 57 | "firehose": { 58 | "attributes": { 59 | "aws.lambda.eventSource.length": "records.length", 60 | "aws.lambda.eventSource.region": "region" 61 | }, 62 | "name": "firehose", 63 | "required_keys": [ 64 | "deliveryStreamArn", 65 | "records[0].kinesisRecordMetadata" 66 | ] 67 | }, 68 | "kinesis": { 69 | "attributes": { 70 | "aws.lambda.eventSource.length": "Records.length", 71 | "aws.lambda.eventSource.region": "Records[0].awsRegion" 72 | }, 73 | "name": "kinesis", 74 | "required_keys": [ 75 | "Records[0].kinesis" 76 | ] 77 | }, 78 | "s3": { 79 | "attributes": { 80 | "aws.lambda.eventSource.bucketName": "Records[0].s3.bucket.name", 81 | "aws.lambda.eventSource.eventName": "Records[0].eventName", 82 | "aws.lambda.eventSource.eventTime": "Records[0].eventTime", 83 | "aws.lambda.eventSource.length": "Records.length", 84 | "aws.lambda.eventSource.objectKey": "Records[0].s3.object.key", 85 | "aws.lambda.eventSource.objectSequencer": "Records[0].s3.object.sequencer", 86 | "aws.lambda.eventSource.objectSize": "Records[0].s3.object.size", 87 | "aws.lambda.eventSource.region": "Records[0].awsRegion" 88 | }, 89 | "name": "s3", 90 | "required_keys": [ 91 | "Records[0].s3" 92 | ] 93 | }, 94 | "ses": { 95 | "attributes": { 96 | "aws.lambda.eventSource.date": "Records[0].ses.mail.commonHeaders.date", 97 | "aws.lambda.eventSource.length": "Records.length", 98 | "aws.lambda.eventSource.messageId": "Records[0].ses.mail.commonHeaders.messageId", 99 | "aws.lambda.eventSource.returnPath": "Records[0].ses.mail.commonHeaders.returnPath" 100 | }, 101 | "name": "ses", 102 | "required_keys": [ 103 | "Records[0].ses" 104 | ] 105 | }, 106 | "sns": { 107 | "attributes": { 108 | "aws.lambda.eventSource.length": "Records.length", 109 | "aws.lambda.eventSource.messageId": "Records[0].Sns.MessageId", 110 | "aws.lambda.eventSource.timestamp": "Records[0].Sns.Timestamp", 111 | "aws.lambda.eventSource.topicArn": "Records[0].Sns.TopicArn", 112 | "aws.lambda.eventSource.type": "Records[0].Sns.Type" 113 | }, 114 | "name": "sns", 115 | "required_keys": [ 116 | "Records[0].Sns" 117 | ] 118 | }, 119 | "sqs": { 120 | "attributes": { 121 | "aws.lambda.eventSource.length": "Records.length" 122 | }, 123 | "name": "sqs", 124 | "required_keys": [ 125 | "Records[0].receiptHandle" 126 | ] 127 | } 128 | } -------------------------------------------------------------------------------- /python/newrelic_lambda_wrapper.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import importlib 4 | import os 5 | import sys 6 | import warnings 7 | 8 | os.environ.setdefault("NEW_RELIC_APP_NAME", os.getenv("AWS_LAMBDA_FUNCTION_NAME", "")) 9 | os.environ.setdefault("NEW_RELIC_NO_CONFIG_FILE", "true") 10 | os.environ.setdefault("NEW_RELIC_DISTRIBUTED_TRACING_ENABLED", "true") 11 | os.environ.setdefault("NEW_RELIC_SERVERLESS_MODE_ENABLED", "true") 12 | os.environ.setdefault( 13 | "NEW_RELIC_TRUSTED_ACCOUNT_KEY", os.getenv("NEW_RELIC_ACCOUNT_ID", "") 14 | ) 15 | os.environ.setdefault("NEW_RELIC_PACKAGE_REPORTING_ENABLED", "false") 16 | 17 | # The agent will load some environment variables on module import so we need 18 | # to perform 
the import after setting the necessary environment variables. 19 | import newrelic.agent # noqa 20 | from newrelic_lambda.lambda_handler import lambda_handler # noqa 21 | 22 | newrelic.agent.initialize() 23 | 24 | 25 | class IOpipeNoOp(object): 26 | def __call__(self, *args, **kwargs): 27 | warnings.warn( 28 | "Use of context.iopipe.* is no longer supported. " 29 | "Please see New Relic Python agent documentation here: " 30 | "https://docs.newrelic.com/docs/agents/python-agent" 31 | ) 32 | 33 | def __getattr__(self, name): 34 | return IOpipeNoOp() 35 | 36 | 37 | def get_handler(): 38 | if ( 39 | "NEW_RELIC_LAMBDA_HANDLER" not in os.environ 40 | or not os.environ["NEW_RELIC_LAMBDA_HANDLER"] 41 | ): 42 | raise ValueError( 43 | "No value specified in NEW_RELIC_LAMBDA_HANDLER environment variable" 44 | ) 45 | 46 | try: 47 | module_path, handler_name = os.environ["NEW_RELIC_LAMBDA_HANDLER"].rsplit( 48 | ".", 1 49 | ) 50 | except ValueError: 51 | raise ValueError( 52 | "Improperly formated handler value: %s" 53 | % os.environ["NEW_RELIC_LAMBDA_HANDLER"] 54 | ) 55 | 56 | try: 57 | # Use the same check as 58 | # https://github.com/aws/aws-lambda-python-runtime-interface-client/blob/97dee252434edc56be4cafd54a9af1e7fa041eaf/awslambdaric/bootstrap.py#L33 59 | if module_path.split(".")[0] in sys.builtin_module_names: 60 | raise ImportError( 61 | "Cannot use built-in module %s as a handler module" % module_path 62 | ) 63 | 64 | module = importlib.import_module(module_path.replace("/", ".")) 65 | except ImportError as e: 66 | raise ImportError("Failed to import module '%s': %s" % (module_path, e)) 67 | except Exception as e: 68 | raise type(e)(f"Error while importing '{module_path}': {type(e).__name__} {str(e)}").with_traceback(e.__traceback__) 69 | 70 | 71 | try: 72 | handler = getattr(module, handler_name) 73 | except AttributeError: 74 | raise AttributeError( 75 | "No handler '%s' in module '%s'" % (handler_name, module_path) 76 | ) 77 | 78 | return handler 79 | 80 | 81 | # Greedily load the handler during cold start, so we don't pay for it on first invoke 82 | wrapped_handler = get_handler() 83 | 84 | 85 | @lambda_handler() 86 | def handler(event, context): 87 | context.iopipe = IOpipeNoOp() 88 | return wrapped_handler(event, context) 89 | -------------------------------------------------------------------------------- /python/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "devDependencies": { 3 | "serverless": "^3.40.0", 4 | "serverless-offline": "^13.9.0" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /python/publish-layers.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -Eeuo pipefail 4 | 5 | BUILD_DIR=python 6 | DIST_DIR=dist 7 | NEWRELIC_AGENT_VERSION="" 8 | VERSION_REGEX="v?([0-9]+\.[0-9]+\.[0-9]+)\.[0-9]+_python" 9 | PY38_DIST_ARM64=$DIST_DIR/python38.arm64.zip 10 | PY39_DIST_ARM64=$DIST_DIR/python39.arm64.zip 11 | PY310_DIST_ARM64=$DIST_DIR/python310.arm64.zip 12 | PY311_DIST_ARM64=$DIST_DIR/python311.arm64.zip 13 | PY312_DIST_ARM64=$DIST_DIR/python312.arm64.zip 14 | PY313_DIST_ARM64=$DIST_DIR/python313.arm64.zip 15 | 16 | PY38_DIST_X86_64=$DIST_DIR/python38.x86_64.zip 17 | PY39_DIST_X86_64=$DIST_DIR/python39.x86_64.zip 18 | PY310_DIST_X86_64=$DIST_DIR/python310.x86_64.zip 19 | PY311_DIST_X86_64=$DIST_DIR/python311.x86_64.zip 20 | PY312_DIST_X86_64=$DIST_DIR/python312.x86_64.zip 21 | 
PY313_DIST_X86_64=$DIST_DIR/python313.x86_64.zip 22 | 23 | source ../libBuild.sh 24 | 25 | function usage { 26 | echo "./publish-layers.sh [python3.8|python3.9|python3.10|python3.11|python3.12]" 27 | } 28 | 29 | function build_python_layer { 30 | local python_version=$1 31 | local arch=$2 32 | ZIP=$DIST_DIR/python${python_version//./}.${arch}.zip 33 | echo "zip file: ${ZIP}" 34 | echo "Building New Relic layer for python${python_version} (${arch})" 35 | rm -rf $BUILD_DIR $ZIP 36 | mkdir -p $DIST_DIR 37 | 38 | # Determine agent version from git tag 39 | if [[ -z "${GITHUB_REF_NAME}" ]]; then 40 | echo "Unable to determine agent version, GITHUB_REF_NAME environment variable not set." >&2 41 | exit 1; 42 | elif [[ "${GITHUB_REF_NAME}" =~ ${VERSION_REGEX} ]]; then 43 | # Extract the version number from the GITHUB_REF_NAME using regex 44 | NEWRELIC_AGENT_VERSION="${BASH_REMATCH[1]}" 45 | echo "Detected NEWRELIC_AGENT_VERSION: ${NEWRELIC_AGENT_VERSION}" 46 | else 47 | echo "Unable to determine agent version, GITHUB_REF_NAME environment variable did not match regex. GITHUB_REF_NAME: ${GITHUB_REF_NAME}" >&2 48 | exit 1; 49 | fi 50 | 51 | pip install --no-cache-dir -qU "newrelic==${NEWRELIC_AGENT_VERSION}" -t $BUILD_DIR/lib/python${python_version}/site-packages 52 | cp newrelic_lambda_wrapper.py "$BUILD_DIR/lib/python${python_version}/site-packages/newrelic_lambda_wrapper.py" 53 | cp -r newrelic_lambda "$BUILD_DIR/lib/python${python_version}/site-packages/newrelic_lambda" 54 | find $BUILD_DIR -name '__pycache__' -exec rm -rf {} + 55 | 56 | download_extension $arch 57 | zip -rq $ZIP $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 58 | rm -rf $BUILD_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 59 | 60 | echo "Build complete: ${ZIP}" 61 | } 62 | 63 | 64 | function publish_python_layer { 65 | local python_version=$1 66 | local arch=$2 67 | ZIP=$DIST_DIR/python${python_version//./}.${arch}.zip 68 | 69 | if [ ! 
-f ${ZIP} ]; then 70 | echo "Package not found: ${ZIP}" 71 | exit 1 72 | fi 73 | 74 | if [[ "${arch}" == "arm64" ]]; then 75 | REGIONS=("${REGIONS_ARM[@]}"); 76 | else 77 | REGIONS=("${REGIONS_X86[@]}"); 78 | fi 79 | 80 | for region in "${REGIONS[@]}"; do 81 | echo "Publishing layer for python${python_version} (${arch}) to region ${region}" 82 | publish_layer ${ZIP} $region python${python_version} ${arch} $NEWRELIC_AGENT_VERSION 83 | done 84 | } 85 | 86 | 87 | case "$1" in 88 | "python3.8") 89 | build_python_layer 3.8 arm64 90 | publish_python_layer 3.8 arm64 91 | publish_docker_ecr $PY38_DIST_ARM64 python3.8 arm64 92 | build_python_layer 3.8 x86_64 93 | publish_python_layer 3.8 x86_64 94 | publish_docker_ecr $PY38_DIST_X86_64 python3.8 x86_64 95 | ;; 96 | "python3.9") 97 | build_python_layer 3.9 arm64 98 | publish_python_layer 3.9 arm64 99 | publish_docker_ecr $PY39_DIST_ARM64 python3.9 arm64 100 | build_python_layer 3.9 x86_64 101 | publish_python_layer 3.9 x86_64 102 | publish_docker_ecr $PY39_DIST_X86_64 python3.9 x86_64 103 | ;; 104 | "python3.10") 105 | build_python_layer 3.10 arm64 106 | publish_python_layer 3.10 arm64 107 | publish_docker_ecr $PY310_DIST_ARM64 python3.10 arm64 108 | build_python_layer 3.10 x86_64 109 | publish_python_layer 3.10 x86_64 110 | publish_docker_ecr $PY310_DIST_X86_64 python3.10 x86_64 111 | ;; 112 | "python3.11") 113 | build_python_layer 3.11 arm64 114 | publish_python_layer 3.11 arm64 115 | publish_docker_ecr $PY311_DIST_ARM64 python3.11 arm64 116 | build_python_layer 3.11 x86_64 117 | publish_python_layer 3.11 x86_64 118 | publish_docker_ecr $PY311_DIST_X86_64 python3.11 x86_64 119 | ;; 120 | "python3.12") 121 | build_python_layer 3.12 arm64 122 | publish_python_layer 3.12 arm64 123 | publish_docker_ecr $PY312_DIST_ARM64 python3.12 arm64 124 | build_python_layer 3.12 x86_64 125 | publish_python_layer 3.12 x86_64 126 | publish_docker_ecr $PY312_DIST_X86_64 python3.12 x86_64 127 | ;; 128 | "python3.13") 129 | build_python_layer 3.13 arm64 130 | publish_python_layer 3.13 arm64 131 | publish_docker_ecr $PY313_DIST_ARM64 python3.13 arm64 132 | build_python_layer 3.13 x86_64 133 | publish_python_layer 3.13 x86_64 134 | publish_docker_ecr $PY313_DIST_X86_64 python3.13 x86_64 135 | ;; 136 | *) 137 | usage 138 | ;; 139 | esac 140 | -------------------------------------------------------------------------------- /python/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | from threading import Timer, Event 4 | 5 | import pytest 6 | 7 | 8 | CWD = os.path.dirname(__file__) 9 | SERVERLESS = os.path.realpath(os.path.join(CWD, "../node_modules/serverless/bin/serverless.js")) 10 | TIMEOUT=60 11 | 12 | 13 | @pytest.fixture(scope="session", autouse=True) 14 | def start_serverless_offline(): 15 | with subprocess.Popen([SERVERLESS, "offline", "start"], cwd=CWD, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process: 16 | timed_out = Event() 17 | def timeout(): 18 | process.kill() 19 | timed_out.set() 20 | raise RuntimeError("Timeout exceeded.") 21 | 22 | timer = Timer(TIMEOUT, timeout) 23 | try: 24 | timer.start() 25 | while not timed_out.is_set(): 26 | line = process.stderr.readline().decode("utf-8") 27 | # Wait for server to be ready 28 | if 'Server ready:' in line: 29 | break 30 | finally: 31 | timer.cancel() 32 | 33 | if timed_out.is_set(): 34 | raise RuntimeError("Timed out waiting for serverless to start.") 35 | 36 | # Return ready server from fixture 37 | yield process 38 | 
39 | 40 | # Kill process on completion 41 | process.kill() 42 | -------------------------------------------------------------------------------- /python/tests/serverless.yml: -------------------------------------------------------------------------------- 1 | service: integration-tests 2 | frameworkVersion: '3' 3 | 4 | plugins: 5 | - serverless-offline 6 | 7 | custom: 8 | PYTHON_RUNTIME: ${env:PYTHON_RUNTIME, 'python3.10'} 9 | 10 | provider: 11 | name: aws 12 | runtime: ${self:custom.PYTHON_RUNTIME} 13 | 14 | functions: 15 | testhandler: 16 | events: 17 | - http: 18 | method: get 19 | path: "/" 20 | handler: src/handler.handler 21 | environment: 22 | NEW_RELIC_LAMBDA_HANDLER: src/handler.handler 23 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 24 | LAMBDA_TASK_ROOT: ./ -------------------------------------------------------------------------------- /python/tests/src/handler.py: -------------------------------------------------------------------------------- 1 | def handler(event, context): 2 | print("Running handler.") 3 | return { 4 | "statusCode": 200, 5 | "body": "{}" 6 | } -------------------------------------------------------------------------------- /python/tests/test_lambda_handler.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | 4 | def test_lambda_handler(start_serverless_offline): 5 | response = requests.get("http://localhost:3000/dev", timeout=10) 6 | assert response.status_code == 200, str([line.decode("utf-8") for line in start_serverless_offline.stderr.readlines()]) 7 | 8 | -------------------------------------------------------------------------------- /ruby/.gitignore: -------------------------------------------------------------------------------- 1 | .bundle/ 2 | dist/ 3 | log/ 4 | support/node_modules/ 5 | support/log/ 6 | Gemfile.lock 7 | -------------------------------------------------------------------------------- /ruby/.rubocop.yml: -------------------------------------------------------------------------------- 1 | AllCops: 2 | NewCops: enable 3 | TargetRubyVersion: 3.2 4 | Exclude: 5 | - 'test/support/node_modules/**/*' 6 | 7 | Lint/UnusedMethodArgument: 8 | Exclude: 9 | - 'test/support/src/handler.rb' 10 | -------------------------------------------------------------------------------- /ruby/Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source 'https://rubygems.org' 4 | 5 | gem 'newrelic_rpm' 6 | 7 | group :development do 8 | gem 'minitest', '~> 5.22' 9 | gem 'rake' 10 | gem 'rubocop', '~> 1.60' 11 | end 12 | -------------------------------------------------------------------------------- /ruby/README.md: -------------------------------------------------------------------------------- 1 | # New Relic AWS Lambda layer for wrapping Ruby Lambda functions 2 | 3 | ## Overview 4 | 5 | This 'ruby' subdirectory contains content for building and publishing 6 | New Relic's AWS Lambda layer for use with Ruby Lambda functions. 7 | 8 | The layer will include the latest stable version of the New Relic Ruby agent gem, 9 | `newrelic_rpm`, the latest New Relic Lambda 10 | [extension](https://github.com/newrelic/newrelic-lambda-extension), 11 | and the lightweight `newrelic_lambda_wrapper.rb` that wraps untouched customer 12 | Lambda functions for observability. 13 | 14 | A layer is created for every region, architecture, and Ruby runtime combination.
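For orientation, a wrapped function points its Lambda handler at the layer's wrapper script and names the customer's original handler in the `NEW_RELIC_LAMBDA_HANDLER` environment variable. The sketch below mirrors the pattern used by `test/support/serverless.yml` in this directory; the function name is a placeholder and the handler string is illustrative:

```yaml
functions:
  my_wrapped_function:                         # placeholder function name
    handler: newrelic_lambda_wrapper.handler   # the wrapper shipped in this layer
    environment:
      # path to the customer's file, optional namespace, and method name
      NEW_RELIC_LAMBDA_HANDLER: src/handler.An::Example.handler
```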
15 | 16 | 17 | ## Layer building and publishing 18 | 19 | With a Ruby v3.2, v3.3, or v3.4 `bundle` binary in your path: 20 | 21 | ```shell 22 | ./publish-layers.sh ruby3.2 23 | 24 | # or 25 | 26 | ./publish-layers.sh ruby3.3 27 | 28 | # or 29 | 30 | ./publish-layers.sh ruby3.4 31 | ``` 32 | 33 | 34 | ## Developer instructions 35 | 36 | - Clone the repository and change to the 'ruby' directory 37 | - Run `bin/setup` 38 | - Run `bundle exec rake test` to run the unit tests 39 | 40 | NOTES: 41 | 42 | - To test Ruby agent changes that are available locally or on GitHub, alter 43 | `Gemfile` accordingly. 44 | - `Gemfile.lock` is Git ignored so that the latest stable agent version is 45 | always fetched. 46 | - For alignment with the Node.js and Python content in this repository, the 47 | [Serverless](https://www.serverless.com/) Node.js module and the 48 | [serverless-offline](https://github.com/dherault/serverless-offline) plugin 49 | are used for simulated AWS Lambda testing. 50 | - All Serverless-related content can be found at `test/support`. 51 | - A simple representation of a customer Lambda function is available at 52 | `test/support/src/handler.rb`. 53 | - The customer Lambda function is referenced via the `NEW_RELIC_LAMBDA_HANDLER` 54 | environment variable defined in `test/support/serverless.yml`. That variable 55 | is formatted as `<path>.<method>`, 56 | with the path portion of the string optionally including the `.rb` file 57 | extension. 58 | - While it is recommended that the system building new layers be set up to run 59 | the same Ruby version as the layer is targeting, a simple `mv` hack currently 60 | exists in `publish-layers.sh` that allows, say, an instance of Ruby 3.3 to 61 | build a Ruby 3.2 layer. 62 | - By default, the `ruby/publish-layers.sh` script will use the prebuilt extension version defined in `libBuild.sh` by the `EXTENSION_VERSION` variable. That variable can be changed to use other extension versions. To instead build a new extension from scratch, edit `ruby/publish-layers.sh` and point it at a local Git clone of the extension. 63 | -------------------------------------------------------------------------------- /ruby/Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'minitest/test_task' 4 | 5 | Minitest::TestTask.create(:test) do |t| 6 | t.libs << 'test' 7 | t.test_globs = ['test/**/*_test.rb'] 8 | end 9 | 10 | task default: :test 11 | -------------------------------------------------------------------------------- /ruby/bin/clean: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -Eeuo pipefail 3 | 4 | # for cleanup when conducting manual .zip file creation 5 | 6 | rm -rf .bundle dist ruby Gemfile.lock extensions preview-extensions* 7 | -------------------------------------------------------------------------------- /ruby/bin/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -Eeuo pipefail 3 | 4 | bin/clean 5 | 6 | if (command -v bundle >/dev/null); then 7 | rm -rf .bundle 8 | bundle install 9 | else 10 | echo "'bundle' command not found!"
11 | exit -1 12 | fi 13 | 14 | cd test/support 15 | npm install 16 | 17 | echo 'Setup complete' 18 | -------------------------------------------------------------------------------- /ruby/newrelic_lambda_wrapper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | ENV['NEW_RELIC_DISTRIBUTED_TRACING_ENABLED'] ||= 'true' 4 | ENV['AWS_LAMBDA_FUNCTION_NAME'] ||= 'lambda_function' 5 | ENV['NEW_RELIC_APP_NAME'] ||= ENV.fetch('AWS_LAMBDA_FUNCTION_NAME', nil) 6 | ENV['NEW_RELIC_TRUSTED_ACCOUNT_KEY'] = ENV.fetch('NEW_RELIC_ACCOUNT_ID', '') 7 | 8 | # The customer's Lambda function is configured to point to this file and its 9 | # `handler` method. 10 | # 11 | # The customer's original handler string is expected to be found as the value 12 | # for the 'NEW_RELIC_LAMBDA_HANDLER' environment variable. That string is 13 | # parsed into its individual file path and Ruby method components. The path 14 | # is passed to a `require` call and then the method (along with its optional 15 | # namespace) is handed to the New Relic Ruby agent's `ServerlessHandler` class. 16 | # 17 | # The `ServerlessHandler` class will start a New Relic transaction, invoke the 18 | # customer's method, and observe its execution. Any activity related to 19 | # instrumented Ruby libraries invoked by the customer's method, any Ruby logger 20 | # calls, and any exceptions raised will all be reported to New Relic. 21 | # 22 | # The customer's original handler string is in the format of `<path>.<method>` 23 | # - `path` holds the absolute filesystem path to a Ruby file 24 | # - `path` can optionally leave off the `.rb` file extension 25 | # - If Ruby were to `load` the file at `path`, the specified `method` would 26 | # then be defined 27 | # - The `method` can be defined at the toplevel namespace or within a 28 | # module and/or class namespace 29 | # - The `path` can contain dots (.)
in either the directory names or 30 | # file names 31 | # 32 | # example 1: 33 | # handler_string = '/opt/my_company/lambda.my_handler' 34 | # 35 | # - a file exists at '/opt/my_company/lambda.rb' 36 | # - lambda.rb has the following content: 37 | # ``` 38 | # def my_handler(event:, context:); end 39 | # ``` 40 | # 41 | # example 2: 42 | # handler_string = '/var/custom/serverless.rb.MyCompany::MyClass.handler' 43 | # 44 | # - a file exists at '/var/custom/serverless.rb' 45 | # - serverless.rb has the following content (note the class level method) 46 | # ``` 47 | # module MyCompany 48 | # class MyClass 49 | # def self.handler(event:, context:); end 50 | # end 51 | # end 52 | # 53 | class NewRelicLambdaWrapper 54 | HANDLER_VAR = 'NEW_RELIC_LAMBDA_HANDLER' 55 | NR_LAYER_GEM_PATH = "/opt/ruby/gems/#{RUBY_VERSION.rpartition('.').first}.0/gems".freeze 56 | 57 | def self.adjust_load_path 58 | return unless Dir.exist?(NR_LAYER_GEM_PATH) 59 | 60 | Dir.glob(File.join(NR_LAYER_GEM_PATH, '*', 'lib')).each do |gem_lib_dir| 61 | $LOAD_PATH.push(gem_lib_dir) unless $LOAD_PATH.include?(gem_lib_dir) 62 | end 63 | end 64 | 65 | def self.require_ruby_agent 66 | adjust_load_path 67 | require 'newrelic_rpm' 68 | rescue StandardError => e 69 | raise "#{self.class.name}: failed to require New Relic layer provided gem(s) - #{e}" 70 | end 71 | 72 | def self.method_name_and_namespace 73 | @method_name_and_namespace ||= parse_customer_handler_string 74 | rescue StandardError => e 75 | raise "#{self.class.name}: failed to prep the Lambda function to be wrapped - #{e}" 76 | end 77 | 78 | # Parse the handler string into its individual components. Load the Ruby file 79 | # and return the customer handler method name and its namespace. 80 | # 81 | # '/path/to/file.method' -> ['method', nil] 82 | # '/path/to/file.MyModule::MyClass.method' -> ['method', 'MyModule::MyClass'] 83 | # 84 | def self.parse_customer_handler_string 85 | handler_string = ENV.fetch(HANDLER_VAR, nil) 86 | raise "Environment variable '#{HANDLER_VAR}' is not set!" unless handler_string 87 | 88 | elements = handler_string.split('.') 89 | ridx = determine_ridx(elements) 90 | file = elements[0..ridx].join('.') 91 | method_string = elements[(ridx + 1)..].join('.') 92 | 93 | require_source_file(file) 94 | 95 | method_string.split('.').reverse 96 | end 97 | private_class_method :parse_customer_handler_string 98 | 99 | def self.determine_ridx(elements) 100 | if elements.size == 1 101 | raise "Failed to parse the '#{HANDLER_VAR}' env var which is expected to be in '.' format!" 102 | end 103 | 104 | elements.size > 2 ? 
-3 : -2 105 | end 106 | private_class_method :determine_ridx 107 | 108 | def self.require_source_file(path) 109 | path = "#{path}.rb" unless path.end_with?('.rb') 110 | path = "#{Dir.pwd}/#{path}" unless path.start_with?('/') 111 | raise "Path '#{path}' does not exist or is not readable" unless File.exist?(path) && File.readable?(path) 112 | 113 | require_relative path 114 | end 115 | private_class_method :require_source_file 116 | end 117 | 118 | # warm the memoization cache so that the very first customer method invocation 119 | # isn't made to wait 120 | NewRelicLambdaWrapper.require_ruby_agent 121 | NewRelicLambdaWrapper.method_name_and_namespace 122 | 123 | def handler(event:, context:) 124 | method_name, namespace = NewRelicLambdaWrapper.method_name_and_namespace 125 | NewRelic::Agent.agent.serverless_handler.invoke_lambda_function_with_new_relic(event:, 126 | context:, 127 | method_name:, 128 | namespace:) 129 | end 130 | -------------------------------------------------------------------------------- /ruby/publish-layers.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -Eeuo pipefail 4 | 5 | # This script creates and publishes an AWS Lambda Ruby layer .zip file for each 6 | # supported architecture. The Ruby content does not change between 7 | # architectures or targeted Ruby versions, but the included Go based AWS Lambda 8 | # extension does. The .zip files are written to dist/ and from there they are 9 | # uploaded to AWS via functionality defined in the ../libBuild.sh script. 10 | # 11 | # Each .zip file is structured like so for Ruby: 12 | # ruby/gems/.0 -- AWS sets this as GEM_PATH. It's where the agent lives 13 | # ruby/lib -- AWS sets this as RUBYLIB. It's where the wrapper script lives 14 | # extensions/ -- Where the NR Go based extension content lives 15 | # preview-extensions-* -- Extensions preview file 16 | 17 | RUBY_DIR=ruby 18 | DIST_DIR=dist 19 | WRAPPER_FILE=newrelic_lambda_wrapper.rb 20 | # Set this to a path to a clone of newrelic-lambda-extension to build 21 | # an extension from scratch instead of downloading one. Set the path to '' 22 | # to simply download a prebuilt one. 
23 | # EXTENSION_CLONE_PATH='../../newrelic-lambda-extension_fallwith' 24 | EXTENSION_CLONE_PATH='' 25 | 26 | # Distribution paths for ARM64 27 | RB32_DIST_ARM64=$DIST_DIR/ruby32.arm64.zip 28 | RB33_DIST_ARM64=$DIST_DIR/ruby33.arm64.zip 29 | RB34_DIST_ARM64=$DIST_DIR/ruby34.arm64.zip 30 | 31 | # Distribution paths for X86_64 32 | RB32_DIST_X86_64=$DIST_DIR/ruby32.x86_64.zip 33 | RB33_DIST_X86_64=$DIST_DIR/ruby33.x86_64.zip 34 | RB34_DIST_X86_64=$DIST_DIR/ruby34.x86_64.zip 35 | 36 | source ../libBuild.sh 37 | 38 | function usage { 39 | echo "./publish-layers.sh [ruby3.2|ruby3.3|ruby3.4]" 40 | } 41 | 42 | function build-ruby32-arm64 { 43 | build_ruby_for_arch 3.2 'arm64' $RB32_DIST_ARM64 44 | } 45 | 46 | function build-ruby33-arm64 { 47 | build_ruby_for_arch 3.3 'arm64' $RB33_DIST_ARM64 48 | } 49 | 50 | function build-ruby34-arm64 { 51 | build_ruby_for_arch 3.4 'arm64' $RB34_DIST_ARM64 52 | } 53 | 54 | 55 | function build-ruby32-x86 { 56 | build_ruby_for_arch 3.2 'x86_64' $RB32_DIST_X86_64 57 | } 58 | 59 | function build-ruby33-x86 { 60 | build_ruby_for_arch 3.3 'x86_64' $RB33_DIST_X86_64 61 | } 62 | 63 | function build-ruby34-x86 { 64 | build_ruby_for_arch 3.4 'x86_64' $RB34_DIST_X86_64 65 | } 66 | 67 | function publish-ruby32-arm64 { 68 | publish_ruby_for_arch 3.2 'arm64' $RB32_DIST_ARM64 69 | } 70 | 71 | function publish-ruby33-arm64 { 72 | publish_ruby_for_arch 3.3 'arm64' $RB33_DIST_ARM64 73 | } 74 | 75 | function publish-ruby34-arm64 { 76 | publish_ruby_for_arch 3.4 'arm64' $RB34_DIST_ARM64 77 | } 78 | 79 | function publish-ruby32-x86 { 80 | publish_ruby_for_arch 3.2 'x86_64' $RB32_DIST_X86_64 81 | } 82 | 83 | function publish-ruby33-x86 { 84 | publish_ruby_for_arch 3.3 'x86_64' $RB33_DIST_X86_64 85 | } 86 | 87 | function publish-ruby34-x86 { 88 | publish_ruby_for_arch 3.4 'x86_64' $RB34_DIST_X86_64 89 | } 90 | 91 | function build_ruby_for_arch { 92 | local ruby_version=$1 93 | local arch=$2 94 | # dynamic filenames are harder to grab for other consumers of this script 95 | # local dist_file="$DIST_DIR/ruby${ruby_version//./}.$arch.zip" 96 | local dist_file=$3 97 | 98 | echo "Building New Relic layer for ruby v$ruby_version ($arch)" 99 | 100 | rm -rf $RUBY_DIR $dist_file 101 | mkdir -p $DIST_DIR 102 | 103 | bundle config set --local without development 104 | bundle config set --local path . # Bundler will create a 'ruby' dir beneath here 105 | bundle install 106 | 107 | local base_dir="$RUBY_DIR/gems/$ruby_version.0" 108 | 109 | # Bundler will have created ./ruby//gems 110 | # AWS wants ./ruby/gems/ 111 | # So we need to flip the directory structure around and also use the right 112 | # Ruby version. For building, we insist on the same major Ruby version but 113 | # are relaxed on the minor version. 
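# For example (illustrative paths only; assumes Bundler ran under Ruby 3.3
# while targeting a ruby3.2 layer):
#   Bundler output: ruby/3.3.0/gems/newrelic_rpm-<version>/...
#   layer layout:   ruby/gems/3.2.0/gems/newrelic_rpm-<version>/...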
114 | mkdir $RUBY_DIR/gems 115 | 116 | # allow Ruby versions other than the target version to bundle 117 | # so if Bundler used Ruby v3.3 and the target is v3.2, the '3.3.0' dir 118 | # gets renamed to '3.2.0' 119 | mv $RUBY_DIR/${ruby_version:0:1}* $base_dir 120 | 121 | for sub_dir in 'bin' 'build_info' 'cache' 'doc' 'extensions' 'plugins'; do 122 | rm -rf $base_dir/$sub_dir 123 | done 124 | 125 | mkdir -p $RUBY_DIR/lib 126 | cp $WRAPPER_FILE $RUBY_DIR/lib 127 | 128 | # if Gemfile points Bundler to GitHub for the agent, discard extraneous repo 129 | # content and repackage the vendored gem content as if it were sourced 130 | # from RubyGems.org 131 | if [[ -e "$base_dir/bundler" ]]; then 132 | local phony_version=$(date +'%s') 133 | mkdir -p $base_dir/gems # dir will exist if non-agent, non-dev gems are in Gemfile 134 | local nr_dir=$base_dir/gems/newrelic_rpm-$phony_version 135 | mv $base_dir/bundler/gems/newrelic-ruby-agent* $nr_dir 136 | rm -rf $base_dir/bundler 137 | mkdir $base_dir/specifications 138 | echo -e "Gem::Specification.new {|s| s.name = 'newrelic_rpm'; s.version = '$phony_version'}" > $base_dir/specifications/newrelic_rpm-$phony_version.gemspec 139 | for sub_dir in '.git' '.github' '.gitignore' '.rubocop.yml' '.rubocop_todo.yml' '.simplecov' '.snyk' '.yardopts' 'Brewfile' 'config' 'CONTRIBUTING.md' 'docker-compose.yml' 'DOCKER.md' 'Dockerfile' 'Gemfile' 'Guardfile' 'infinite_tracing' 'init.rb' 'install.rb' 'lefthook.yml' 'newrelic.yml' 'README.md' 'test' 'THIRD_PARTY_NOTICES.md' 'Thorfile' 'recipes' '.build_ignore'; do 140 | rm -rf "$nr_dir/$sub_dir" 141 | done fi 142 | 143 | if [ "$EXTENSION_CLONE_PATH" == "" ]; then 144 | echo "Downloading prebuilt extension..." 145 | download_extension $arch 146 | else 147 | echo "Building an extension from a local clone..." 
148 | here=$PWD 149 | cd "$EXTENSION_CLONE_PATH" 150 | make "dist-$arch" 151 | mv "$EXTENSION_DIST_DIR" "$here/$EXTENSION_DIST_DIR" 152 | mv "$EXTENSION_DIST_PREVIEW_FILE" "$here/$EXTENSION_DIST_PREVIEW_FILE" 153 | cd $here 154 | fi 155 | 156 | zip -rq $dist_file $RUBY_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 157 | rm -rf $RUBY_DIR $EXTENSION_DIST_DIR $EXTENSION_DIST_PREVIEW_FILE 158 | echo "Build complete: ${dist_file}" 159 | } 160 | 161 | function publish_ruby_for_arch { 162 | local ruby_version=$1 163 | local arch=$2 164 | local dist_file=$3 165 | 166 | for region in "${REGIONS_X86[@]}"; do 167 | echo "Publishing $dist_file for region=$region, ruby=$ruby_version, arch=$arch" 168 | publish_layer $dist_file $region "ruby${ruby_version}" $arch 169 | done 170 | echo 'Publishing complete' 171 | } 172 | 173 | set +u # permit $1 to be unbound so that '*' matches it when no args are present 174 | case "$1" in 175 | "ruby3.4") 176 | build-ruby34-arm64 177 | publish-ruby34-arm64 178 | publish_docker_ecr $RB34_DIST_ARM64 ruby3.4 arm64 179 | build-ruby34-x86 180 | publish-ruby34-x86 181 | publish_docker_ecr $RB34_DIST_X86_64 ruby3.4 x86_64 182 | ;; 183 | "ruby3.3") 184 | build-ruby33-arm64 185 | publish-ruby33-arm64 186 | publish_docker_ecr $RB33_DIST_ARM64 ruby3.3 arm64 187 | build-ruby33-x86 188 | publish-ruby33-x86 189 | publish_docker_ecr $RB33_DIST_X86_64 ruby3.3 x86_64 190 | ;; 191 | "ruby3.2") 192 | build-ruby32-arm64 193 | publish-ruby32-arm64 194 | publish_docker_ecr $RB32_DIST_ARM64 ruby3.2 arm64 195 | build-ruby32-x86 196 | publish-ruby32-x86 197 | publish_docker_ecr $RB32_DIST_X86_64 ruby3.2 x86_64 198 | ;; 199 | *) 200 | usage 201 | ;; 202 | esac 203 | -------------------------------------------------------------------------------- /ruby/test/lambda_wrapper_integration_test.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'fileutils' 4 | require 'minitest/autorun' 5 | require 'minitest/pride' 6 | require 'net/http' 7 | 8 | # LambdaWrapperIntegrationTest - tests to confirm the successful New Relic 9 | # wrapping of unaltered customer Lambda 10 | # functions via a complete integration process 11 | # that includes a customer function, the 12 | # wrapper script, the New Relic Ruby agent, 13 | # and the 'serverless' Node.js module behaving 14 | # as an AWS Lambda service 15 | class LambdaWrapperIntegrationTest < Minitest::Test 16 | METADATA_PATTERN = /"agent_language":"ruby"/ 17 | SERVERLESS_ROOT = 'test/support' 18 | SERVERLESS_OUTPUT_FILE = 'serverless_log' 19 | SERVERLESS_CMD = "cd #{SERVERLESS_ROOT} && node_modules/serverless/bin/serverless.js " \ 20 | "offline start >#{SERVERLESS_OUTPUT_FILE} 2>&1".freeze 21 | SERVERLESS_URI = URI('http://localhost:3000/dev') 22 | 23 | def setup 24 | remove_serverless_output_file 25 | @serverless_pid = nil 26 | @serverless_thread = Thread.new { @serverless_pid = Process.spawn(SERVERLESS_CMD) } 27 | puts 'Giving the serverless process time to start...' 
28 | sleep 10 29 | end 30 | 31 | def teardown 32 | child_pid = `pgrep -P #{@serverless_pid} | head -1`.chomp 33 | Process.kill('KILL', @serverless_pid) if @serverless_pid 34 | Process.kill('KILL', child_pid.to_i) if child_pid.match?(/^\d+$/) 35 | @serverless_thread&.kill 36 | remove_serverless_output_file 37 | end 38 | 39 | def remove_serverless_output_file 40 | FileUtils.rm_f(serverless_output_file_path) 41 | end 42 | 43 | def serverless_output_file_path 44 | File.join(SERVERLESS_ROOT, SERVERLESS_OUTPUT_FILE) 45 | end 46 | 47 | # serverless.yml should be configured to point to the wrapper 48 | # with an env var that points to the customer function to be wrapped 49 | def test_wrapped_customer_function 50 | response = Net::HTTP.get(SERVERLESS_URI) 51 | 52 | # confirm that the customer's handler output has been returned 53 | assert_equal 'handled', response 54 | 55 | # confirm that the New Relic agent has generated one or more payloads 56 | # from having wrapped the customer function 57 | data = File.read(serverless_output_file_path).split("\n") 58 | nr_payload = data.detect { |line| line.start_with?('[') } 59 | refute_nil nr_payload 60 | 61 | assert_match METADATA_PATTERN, nr_payload 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /ruby/test/lambda_wrapper_parsing_test.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'minitest/autorun' 4 | require 'minitest/pride' 5 | 6 | # LambdaWrapperParsingTest - The tests defined by this class do not involve 7 | # the New Relic Ruby agent. Instead, they test the 8 | # wrapper script's handler string parsing 9 | # functionality to ensure that all supported 10 | # formats work well and that all unhappy paths to 11 | # exceptions are verified as working. 12 | # 13 | # NOTE: Simply loading the wrapper script causes it to parse the handler string 14 | # from an environment variable and memoize it. This is by design so that 15 | # everything is loaded and ready prior to an actual customer invocation 16 | # of their wrapped method. That means that these tests have to repeatedly 17 | # reset the ENV hash and `load` the wrapper script over and over again, 18 | # which unfortunately leads to Ruby warnings about constants and methods 19 | # being redefined. 
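#
# For reference, the parses exercised below follow the wrapper's documented
# '<path>.<method>' and '<path>.<namespace>.<method>' formats (paths are
# illustrative only):
#   '/opt/my_company/lambda.my_handler'                    -> ['my_handler', nil]
#   '/var/custom/serverless.rb.MyCompany::MyClass.handler' -> ['handler', 'MyCompany::MyClass']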
20 | class LambdaWrapperUnitTest < Minitest::Test 21 | def test_handler_string_parse_works_for_toplevel_methods 22 | expected_method_name = 'my_handler' 23 | expected_path = '/opt/my_company/lambda' 24 | handler_string = "#{expected_path}.#{expected_method_name}" 25 | 26 | method_name, namespace = wrapper_parse(handler_string, expected_path) 27 | 28 | assert_equal expected_method_name, method_name 29 | refute namespace 30 | end 31 | 32 | def test_handler_string_parse_works_for_namespaced_methods 33 | expected_path = '/var/custom/serverless.rb' 34 | expected_method_name = 'handler' 35 | expected_namespace = 'MyCompany::MyClass' 36 | handler_string = "#{expected_path}.#{expected_namespace}.#{expected_method_name}" 37 | 38 | method_name, namespace = wrapper_parse(handler_string, expected_path) 39 | 40 | assert_equal expected_method_name, method_name 41 | assert_equal expected_namespace, namespace 42 | end 43 | 44 | def test_handler_string_parse_works_when_dots_exist_in_the_path 45 | expected_path = '/v.ar/custo.m/server.less.rb' 46 | expected_method_name = 'handler' 47 | expected_namespace = 'MyCompany::MyClass' 48 | handler_string = "#{expected_path}.#{expected_namespace}.#{expected_method_name}" 49 | 50 | method_name, namespace = wrapper_parse(handler_string, expected_path) 51 | 52 | assert_equal expected_method_name, method_name 53 | assert_equal expected_namespace, namespace 54 | end 55 | 56 | def test_handler_string_parse_raises_if_the_env_var_is_missing 57 | reset_wrapper 58 | 59 | assert_raises(RuntimeError, /Environment variable/) do 60 | load "#{File.dirname(__FILE__)}/../newrelic_lambda_wrapper.rb" 61 | end 62 | end 63 | 64 | def test_handler_string_parse_raises_if_the_handler_string_is_not_formatted_correctly 65 | assert_raises(RuntimeError, /expected to be in/) do 66 | wrapper_parse('dotless', 'File::NULL') 67 | end 68 | end 69 | 70 | def test_handler_string_parse_raises_if_the_handler_string_has_a_bad_path_value 71 | assert_raises(RuntimeError, /does not exist or is not readable/) do 72 | wrapper_parse('/a/bad/path.handler', '/a/bad/path', stub_path: false) 73 | end 74 | end 75 | 76 | private 77 | 78 | def wrapper_parse(handler_string, expected_path, stub_path: true) 79 | reset_wrapper 80 | 81 | oenv = ENV.to_hash 82 | 83 | ENV['NEW_RELIC_LAMBDA_HANDLER'] = handler_string 84 | 85 | load_wrapper(expected_path, stub_path) 86 | 87 | NewRelicLambdaWrapper.instance_variable_get :@method_name_and_namespace 88 | ensure 89 | ENV.replace oenv 90 | end 91 | 92 | def reset_wrapper 93 | if defined?(NewRelicLambdaWrapper) && NewRelicLambdaWrapper.instance_variable_get(:@method_name_and_namespace) 94 | NewRelicLambdaWrapper.remove_instance_variable :@method_name_and_namespace 95 | end 96 | end 97 | 98 | def load_wrapper(expected_path, stub_path) 99 | return stubbed_wrapper_load(expected_path) if stub_path 100 | 101 | load "#{File.dirname(__FILE__)}/../newrelic_lambda_wrapper.rb" 102 | end 103 | 104 | def stubbed_wrapper_load(path) 105 | path = "#{path}.rb" unless path.end_with?('.rb') 106 | File.stub :exist?, true, [path] do 107 | File.stub :readable?, true, [path] do 108 | Object.stub :require_relative, nil, [path] do 109 | load "#{File.dirname(__FILE__)}/../newrelic_lambda_wrapper.rb" 110 | end 111 | end 112 | end 113 | end 114 | end 115 | -------------------------------------------------------------------------------- /ruby/test/support/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Offline serverless function 
wrapping tester", 3 | "devDependencies": { 4 | "serverless": "^3.40.0", 5 | "serverless-offline": "^13.9.0" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /ruby/test/support/serverless.yml: -------------------------------------------------------------------------------- 1 | service: integration-tests 2 | frameworkVersion: '3' 3 | 4 | plugins: 5 | - serverless-offline 6 | 7 | custom: 8 | RUBY_RUNTIME: ${env:RUBY_RUNTIME, 'ruby3.2'} 9 | 10 | provider: 11 | name: aws 12 | runtime: ${self:custom.RUBY_RUNTIME} 13 | 14 | functions: 15 | testhandler: 16 | events: 17 | - http: 18 | method: get 19 | path: '/' 20 | handler: ../../newrelic_lambda_wrapper.handler 21 | environment: 22 | AWS_LAMBDA_FUNCTION_NAME: CalvinAndHobbes 23 | NEW_RELIC_LAMBDA_HANDLER: src/handler.An::Example.handler 24 | NEW_RELIC_LAMBDA_EXTENSION_ENABLED: false 25 | -------------------------------------------------------------------------------- /ruby/test/support/src/handler.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # An::Example - this class provides a simple example of a customer's 4 | # Lambda handler to be wrapped by New Relic. 5 | # 6 | # A method can be defined in Ruby's toplevel (Object) namespace 7 | # or within a class such as the one used here. When defined 8 | # within a class, the method is expected to be a class (self.) 9 | # level method. 10 | # 11 | # There are no requirements imposed by either AWS or New Relic 12 | # on top of standard Ruby requirements for module, class, and 13 | # method naming. 14 | # 15 | # This example handler method will return a string body that 16 | # can be inspected by the unit tests to confirm that it reaches 17 | # the client caller even when wrapped by New Relic. 18 | module An 19 | class Example 20 | def self.handler(event:, context:) 21 | puts 'Running handler' 22 | { statusCode: 200, body: 'handled' } 23 | end 24 | end 25 | end 26 | --------------------------------------------------------------------------------