├── .clang-format
├── .gitattributes
├── .github
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── actions
│   │   ├── extract_versions
│   │   │   └── action.yml
│   │   ├── pr_comment
│   │   │   └── action.yml
│   │   └── setup_cached_java
│   │       └── action.yml
│   ├── scripts
│   │   ├── cppcheck-gh.xslt
│   │   ├── cppcheck-html.xslt
│   │   ├── cppcheck-suppressions.txt
│   │   ├── java_setup.sh
│   │   ├── prepare_reports.sh
│   │   ├── python_utils.py
│   │   ├── release.sh
│   │   └── test_alpine_aarch64.sh
│   └── workflows
│       ├── add-milestone-to-pull-requests.yaml
│       ├── approve-trivial.yml
│       ├── cache_java.yml
│       ├── ci.yml
│       ├── codecheck.yml
│       ├── create-next-milestone.yaml
│       ├── gh_release.yml
│       ├── increment-milestones-on-tag.yaml
│       ├── nightly.yml
│       ├── release.yml
│       ├── test_workflow.yml
│       └── update_assets.yml
├── .gitignore
├── .gitlab-ci.yml
├── CHANGELOG.md
├── LICENSE
├── README.md
├── build.gradle
├── common.gradle
├── ddprof-lib
│   ├── benchmarks
│   │   ├── build.gradle
│   │   ├── build_run.sh
│   │   └── src
│   │       ├── benchmarkConfig.h
│   │       └── unwindFailuresBenchmark.cpp
│   ├── build.gradle
│   ├── gtest
│   │   └── build.gradle
│   ├── settings.gradle
│   └── src
│       ├── main
│       │   ├── cpp
│       │   │   ├── arch_dd.h
│       │   │   ├── arguments.cpp
│       │   │   ├── arguments.h
│       │   │   ├── asyncSampleMutex.h
│       │   │   ├── buffers.h
│       │   │   ├── callTraceStorage.cpp
│       │   │   ├── callTraceStorage.h
│       │   │   ├── codeCache.cpp
│       │   │   ├── codeCache.h
│       │   │   ├── common.h
│       │   │   ├── context.cpp
│       │   │   ├── context.h
│       │   │   ├── counters.cpp
│       │   │   ├── counters.h
│       │   │   ├── ctimer.h
│       │   │   ├── ctimer_linux.cpp
│       │   │   ├── debugSupport.cpp
│       │   │   ├── debugSupport.h
│       │   │   ├── dictionary.cpp
│       │   │   ├── dictionary.h
│       │   │   ├── dwarf.cpp
│       │   │   ├── dwarf.h
│       │   │   ├── engine.cpp
│       │   │   ├── engine.h
│       │   │   ├── event.h
│       │   │   ├── flightRecorder.cpp
│       │   │   ├── flightRecorder.h
│       │   │   ├── frame.h
│       │   │   ├── itimer.cpp
│       │   │   ├── itimer.h
│       │   │   ├── j9Ext.cpp
│       │   │   ├── j9Ext.h
│       │   │   ├── j9WallClock.cpp
│       │   │   ├── j9WallClock.h
│       │   │   ├── javaApi.cpp
│       │   │   ├── javaApi.h
│       │   │   ├── jfrMetadata.cpp
│       │   │   ├── jfrMetadata.h
│       │   │   ├── jniHelper.h
│       │   │   ├── jvm.cpp
│       │   │   ├── jvm.h
│       │   │   ├── jvmHeap.h
│       │   │   ├── libraries.cpp
│       │   │   ├── libraries.h
│       │   │   ├── linearAllocator.cpp
│       │   │   ├── linearAllocator.h
│       │   │   ├── livenessTracker.cpp
│       │   │   ├── livenessTracker.h
│       │   │   ├── log.cpp
│       │   │   ├── log.h
│       │   │   ├── objectSampler.cpp
│       │   │   ├── objectSampler.h
│       │   │   ├── os.h
│       │   │   ├── os_linux.cpp
│       │   │   ├── os_macos.cpp
│       │   │   ├── perfEvents.h
│       │   │   ├── perfEvents_linux.cpp
│       │   │   ├── pidController.cpp
│       │   │   ├── pidController.h
│       │   │   ├── profiler.cpp
│       │   │   ├── profiler.h
│       │   │   ├── reservoirSampler.h
│       │   │   ├── rustDemangler.cpp
│       │   │   ├── rustDemangler.h
│       │   │   ├── safeAccess.h
│       │   │   ├── spinLock.h
│       │   │   ├── stackFrame.h
│       │   │   ├── stackWalker_dd.h
│       │   │   ├── symbols.h
│       │   │   ├── symbols_linux.cpp
│       │   │   ├── symbols_linux.h
│       │   │   ├── symbols_macos.cpp
│       │   │   ├── thread.cpp
│       │   │   ├── thread.h
│       │   │   ├── threadFilter.cpp
│       │   │   ├── threadFilter.h
│       │   │   ├── threadInfo.cpp
│       │   │   ├── threadInfo.h
│       │   │   ├── threadLocalData.h
│       │   │   ├── threadState.h
│       │   │   ├── tsc.cpp
│       │   │   ├── tsc.h
│       │   │   ├── unwindStats.cpp
│       │   │   ├── unwindStats.h
│       │   │   ├── vmEntry.cpp
│       │   │   ├── vmEntry.h
│       │   │   ├── vmStructs_dd.cpp
│       │   │   ├── vmStructs_dd.h
│       │   │   ├── wallClock.cpp
│       │   │   └── wallClock.h
│       │   └── java
│       │       └── com
│       │           └── datadoghq
│       │               └── profiler
│       │                   ├── Arch.java
│       │                   ├── ContextSetter.java
│       │                   ├── JVMAccess.java
│       │                   ├── JavaProfiler.java
│       │                   ├── LibraryLoader.java
│       │                   ├── Main.java
│       │                   ├── OperatingSystem.java
│       │                   └── Platform.java
│       └── test
│           ├── cpp
│           │   ├── ddprof_ut.cpp
│           │   ├── demangle_ut.cpp
│           │   └── elfparser_ut.cpp
│           ├── make
│           │   └── Makefile
│           └── resources
│               └── native-libs
│                   ├── reladyn-lib
│                   │   ├── Makefile
│                   │   └── reladyn.c
│                   ├── small-lib
│                   │   ├── Makefile
│                   │   ├── small_lib.cpp
│                   │   └── small_lib.h
│                   └── unresolved-functions
│                       ├── Makefile
│                       ├── linker.ld
│                       ├── main.c
│                       └── readme.txt
├── ddprof-stresstest
│   ├── build.gradle
│   └── src
│       └── jmh
│           └── java
│               └── com
│                   └── datadoghq
│                       └── profiler
│                           └── stresstest
│                               ├── AbstractFormatter.java
│                               ├── CompositeFormatter.java
│                               ├── Configuration.java
│                               ├── Formatter.java
│                               ├── HtmlCommentFormatter.java
│                               ├── HtmlFormatter.java
│                               ├── Main.java
│                               ├── WhiteboxProfiler.java
│                               └── scenarios
│                                   ├── CapturingLambdas.java
│                                   ├── DumpRecording.java
│                                   ├── GraphMutation.java
│                                   ├── GraphState.java
│                                   ├── NanoTime.java
│                                   └── TracedParallelWork.java
├── ddprof-test-tracer
│   ├── build.gradle
│   └── src
│       └── main
│           └── java
│               └── com
│                   └── datadoghq
│                       └── profiler
│                           └── context
│                               ├── ContextExecutor.java
│                               ├── ContextTask.java
│                               ├── RegisteringThreadFactory.java
│                               └── Tracing.java
├── ddprof-test
│   ├── build.gradle
│   └── src
│       └── test
│           └── java
│               └── com
│                   └── datadoghq
│                       └── profiler
│                           ├── AbstractProcessProfilerTest.java
│                           ├── AbstractProfilerTest.java
│                           ├── CStackAwareAbstractProfilerTest.java
│                           ├── ExternalLauncher.java
│                           ├── JVMAccessTest.java
│                           ├── JavaProfilerTest.java
│                           ├── MoreAssertions.java
│                           ├── MuslDetectionTest.java
│                           ├── PlatformTest.java
│                           ├── alloc
│                           │   └── AllocationProfilerTest.java
│                           ├── classgc
│                           │   └── ClassGCTest.java
│                           ├── context
│                           │   └── TagContextTest.java
│                           ├── cpu
│                           │   ├── CTimerSamplerTest.java
│                           │   ├── ContextCpuTest.java
│                           │   ├── IOBoundCode.java
│                           │   ├── LightweightContextCpuTest.java
│                           │   ├── ProfiledCode.java
│                           │   └── SmokeCpuTest.java
│                           ├── endpoints
│                           │   └── EndpointTest.java
│                           ├── filter
│                           │   └── ThreadFilterSmokeTest.java
│                           ├── jfr
│                           │   ├── CpuDumpSmokeTest.java
│                           │   ├── JfrDumpTest.java
│                           │   ├── ObjectSampleDumpSmokeTest.java
│                           │   └── WallclockDumpSmokeTest.java
│                           ├── junit
│                           │   ├── CStack.java
│                           │   ├── CStackInjector.java
│                           │   └── RetryTest.java
│                           ├── loadlib
│                           │   └── LoadLibraryTest.java
│                           ├── memleak
│                           │   ├── GCGenerationsTest.java
│                           │   └── MemleakProfilerTest.java
│                           ├── metadata
│                           │   ├── BoundMethodHandleMetadataSizeTest.java
│                           │   └── MetadataNormalisationTest.java
│                           ├── nativelibs
│                           │   └── NativeLibrariesTest.java
│                           ├── queue
│                           │   └── QueueTimeTest.java
│                           ├── settings
│                           │   └── DatadogSettingsTest.java
│                           ├── shutdown
│                           │   └── ShutdownTest.java
│                           └── wallclock
│                               ├── BaseContextWallClockTest.java
│                               ├── CollapsingSleepTest.java
│                               ├── ContendedWallclockSamplesTest.java
│                               ├── ContextWallClockTest.java
│                               ├── JvmtiBasedContextWallClockTest.java
│                               ├── JvmtiBasedWallClockThreadFilterTest.java
│                               ├── MegamorphicCallTest.java
│                               ├── SleepTest.java
│                               ├── SmokeWallTest.java
│                               └── WallClockThreadFilterTest.java
├── gradle
│   ├── ap-lock.properties
│   ├── configurations.gradle
│   ├── enforcement
│   │   ├── .clang-format
│   │   ├── codenarc.groovy
│   │   ├── codenarcTest.groovy
│   │   ├── spotless-groovy.properties
│   │   ├── spotless-scalafmt.conf
│   │   └── spotless-xml.properties
│   ├── sanitizers
│   │   ├── asan.supp
│   │   ├── tsan.supp
│   │   └── ubsan.supp
│   ├── scm.gradle
│   ├── semantic-version.gradle
│   ├── spotless.gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── legacy_tests
│   ├── include.sh
│   ├── load-libraries-test.sh
│   ├── loadlibs
│   │   ├── com
│   │   │   └── datadoghq
│   │   │       └── loader
│   │   │           └── DynamicLibraryLoader.java
│   │   ├── com_datadoghq_loader_DynamicLibraryLoader.cpp
│   │   ├── com_datadoghq_loader_DynamicLibraryLoader.h
│   │   ├── increment.cpp
│   │   └── increment.h
│   ├── run_renaissance.sh
│   └── test-all.sh
├── malloc-shim
│   ├── build.gradle
│   ├── settings.gradle
│   └── src
│       └── main
│           ├── cpp
│           │   └── malloc_intercept.cpp
│           └── public
│               └── debug.h
├── pom.xml
├── settings.gradle
├── test
│   ├── native
│   │   ├── libs
│   │   │   └── reladyn.c
│   │   └── symbolsLinuxTest.cpp
│   └── test
│       └── nativemem
│           └── malloc_plt_dyn.c
└── utils
    ├── cherry.sh
    └── init_cherypick_repo.sh
/.clang-format:
--------------------------------------------------------------------------------
1 | ---
2 | Language: Cpp
3 | BasedOnStyle: LLVM
4 | IndentWidth: 4
5 | ColumnLimit: 100
6 | BreakBeforeBraces: Attach
7 | AllowShortFunctionsOnASingleLine: None
8 | AllowShortIfStatementsOnASingleLine: false
9 | AllowShortLoopsOnASingleLine: false
10 | AllowShortBlocksOnASingleLine: false
11 | AllowShortCaseLabelsOnASingleLine: false
12 | AllowShortNamespacesOnASingleLine: false
13 | AllowShortStructsOnASingleLine: false
14 | AllowShortEnumsOnASingleLine: false
15 | AllowShortLambdasOnASingleLine: false
16 | AllowShortCompoundStatementsOnASingleLine: false
17 | AllowShortAlwaysBreakType: None
18 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.sh eol=lf
2 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | **What does this PR do?**:
2 |
3 |
4 | **Motivation**:
5 |
6 |
7 | **Additional Notes**:
8 |
9 |
10 | **How to test the change?**:
11 |
17 |
18 | **For Datadog employees**:
19 | - [ ] If this PR touches code that signs or publishes builds or packages, or handles
20 | credentials of any kind, I've requested a review from `@DataDog/security-design-and-guidance`.
21 | - [ ] This PR doesn't touch any of that.
22 | - [ ] JIRA: [JIRA-XXXX]
23 |
24 | Unsure? Have a question? Request a review!
25 |
--------------------------------------------------------------------------------
/.github/actions/extract_versions/action.yml:
--------------------------------------------------------------------------------
1 | name: Extract Java and Gradle Versions
2 | description: Versions are reported in JAVA_VERSION and GRADLE_VERSION environment variables
3 |
4 | runs:
5 | using: "composite"
6 | steps:
7 | - name: Extract Versions
8 | id: extract_versions
9 | shell: bash
10 | run: |
11 | set +e
12 |
13 | # Extract Java version
14 | ${{ env.JAVA_TEST_HOME }}/bin/java -version
15 | JAVA_VERSION=$(${{ env.JAVA_TEST_HOME }}/bin/java -version 2>&1 | awk -F '"' '/version/ {
16 | split($2, v, "[._]");
17 | if (v[2] == "") {
18 | # Version is like "24": assume it is major only and add .0.0
19 | printf "%s.0.0\n", v[1]
20 | } else if (v[1] == "1") {
21 | # Java 8 or older: Format is "1.major.minor_update"
22 | printf "%s.%s.%s\n", v[2], v[3], v[4]
23 | } else {
24 | # Java 9 or newer: Format is "major.minor.patch"
25 | printf "%s.%s.%s\n", v[1], v[2], v[3]
26 | }
27 | }')
28 | echo "JAVA_VERSION=${JAVA_VERSION}"
29 | echo "JAVA_VERSION=${JAVA_VERSION}" >> $GITHUB_ENV
30 |
31 | # Extract Gradle version from gradle-wrapper.properties
32 | gradle_version=$(grep 'distributionUrl' gradle/wrapper/gradle-wrapper.properties | cut -d'=' -f2)
33 | gradle_version=${gradle_version#*gradle-}
34 | gradle_version=${gradle_version%-bin.zip}
35 | echo "GRADLE_VERSION=${gradle_version}" >> $GITHUB_ENV
--------------------------------------------------------------------------------
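The awk program in the `extract_versions` action above reduces the very different `java -version` output formats to a uniform `major.minor.patch` string. As a rough illustration of the same normalization rules (not part of the repository; `normalizeJavaVersion` is a hypothetical helper), a C++ sketch under those assumptions could look like:

// Hypothetical illustration of the normalization performed by the awk above:
//   "24"        -> "24.0.0"   (major-only version string)
//   "1.8.0_392" -> "8.0.392"  (legacy 1.x scheme, Java 8 and older)
//   "17.0.9"    -> "17.0.9"   (Java 9+ scheme)
#include <iostream>
#include <string>
#include <vector>

static std::string normalizeJavaVersion(const std::string& raw) {
    // Split on '.' and '_', mirroring awk's split($2, v, "[._]")
    std::vector<std::string> parts;
    std::string token;
    for (char c : raw) {
        if (c == '.' || c == '_') { parts.push_back(token); token.clear(); }
        else { token.push_back(c); }
    }
    parts.push_back(token);

    if (parts.size() == 1) {             // major only, pad with .0.0
        return parts[0] + ".0.0";
    }
    if (parts[0] == "1") {               // "1.major.minor_update" -> "major.minor.update"
        return parts[1] + "." + parts[2] + "." + (parts.size() > 3 ? parts[3] : "0");
    }
    return parts[0] + "." + parts[1] + "." + (parts.size() > 2 ? parts[2] : "0");
}

int main() {
    std::cout << normalizeJavaVersion("1.8.0_392") << "\n";  // 8.0.392
    std::cout << normalizeJavaVersion("17.0.9") << "\n";     // 17.0.9
    std::cout << normalizeJavaVersion("24") << "\n";         // 24.0.0
}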
/.github/actions/pr_comment/action.yml:
--------------------------------------------------------------------------------
1 | name: "Add or Update Comment"
2 | description: "Adds a comment or updates it based on the given text id"
3 |
4 | inputs:
5 | github-token:
6 | description: "Github token associated with the request"
7 | required: true
8 | commenter:
9 | description: "The commenter identity"
10 | required: true
11 | comment-id:
12 | description: "The text uniquely identifying the comment to update"
13 | required: true
14 | comment-file:
15 | description: "The file containing the comment in HTML format"
16 | required: true
17 |
18 | runs:
19 | using: composite
20 | steps:
21 | - name: Add or Update GitHub comment
22 | uses: actions/github-script@v5
23 | with:
24 | github-token: ${{ inputs.github-token }}
25 | script: |
26 | const fs = require('fs');
27 | const comment_id = "${{ inputs.comment-id }}";
28 | const commenter = "${{ inputs.commenter }}"
29 | const new_comment = fs.readFileSync("${{ inputs.comment-file }}", 'utf8');
30 |
31 | // List all comments in the issue
32 | const comments = await github.rest.issues.listComments({
33 | owner: context.repo.owner,
34 | repo: context.repo.repo,
35 | issue_number: context.issue.number
36 | });
37 |
 38 |           const id = `<!-- ${comment_id} -->`
39 | const content = `${id} \n 🔧 Report generated by ${commenter}\n${new_comment}`
40 | // Find the comment with the search text
41 | const comment = comments.data.find(c => c.body.includes(id));
42 |
43 | if (comment) {
44 | // Update the comment
45 | await github.rest.issues.updateComment({
46 | owner: context.repo.owner,
47 | repo: context.repo.repo,
48 | comment_id: comment.id,
49 | body: content
50 | });
51 | } else {
52 | // Add a new comment
53 | await github.rest.issues.createComment({
54 | owner: context.repo.owner,
55 | repo: context.repo.repo,
56 | issue_number: context.issue.number,
57 | body: content
58 | });
59 | }
--------------------------------------------------------------------------------
/.github/actions/setup_cached_java/action.yml:
--------------------------------------------------------------------------------
1 | name: "Setup test Java environment"
2 | description: "Setup Java environment for testing"
3 |
4 | inputs:
5 | version:
6 | description: "The test JDK version to install"
7 | required: true
8 | default: "11"
9 | arch:
10 | description: "The architecture"
11 | required: true
12 | default: "amd64"
13 |
14 | runs:
15 | using: composite
16 | steps:
17 | - name: Infer Build JDK
18 | shell: bash
19 | id: infer_build_jdk
20 | run: |
21 | echo "Infering JDK 11 [${{ inputs.arch }}]"
22 | if [[ ${{ inputs.arch }} =~ "-musl" ]]; then
23 | echo "build_jdk=jdk11-librca" >> $GITHUB_OUTPUT
24 | else
25 | echo "build_jdk=jdk11" >> $GITHUB_OUTPUT
26 | fi
27 | - name: Cache Build JDK [${{ inputs.arch }}]
28 | id: cache_build_jdk
29 | uses: actions/cache/restore@v4
30 | with:
31 | path: |
32 | jdks/${{ steps.infer_build_jdk.outputs.build_jdk }}
33 | key: ${{ steps.infer_build_jdk.outputs.build_jdk }}-${{ inputs.arch }}--${{ hashFiles('.github/workflows/cache_java.yml', '.github/scripts/java_setup.sh') }}
34 | restore-keys: |
35 | ${{ steps.infer_build_jdk.outputs.build_jdk }}-${{ inputs.arch }}--
36 | enableCrossOsArchive: true
37 | - name: Cache JDK ${{ inputs.version }} [${{ inputs.arch }}]
38 | id: cache_jdk
39 | uses: actions/cache/restore@v4
40 | with:
41 | path: |
42 | jdks/jdk${{ inputs.version }}
43 | key: jdk${{ inputs.version }}-${{ inputs.arch }}--${{ hashFiles('.github/workflows/cache_java.yml', '.github/scripts/java_setup.sh') }}
44 | restore-keys: |
45 | jdk${{ inputs.version }}-${{ inputs.arch }}--
46 | enableCrossOsArchive: true
47 | - name: JDK cache miss
48 | if: steps.cache_jdk.outputs.cache-hit != 'true' || steps.cache_build_jdk.outputs.cache-hit != 'true'
49 | shell: bash
50 | run: |
 51 |         # well, the cache-hit is not always set to 'true', even when the cache is hit (but it is not freshly recreated, whatever that means)
52 | if [ ! -d "jdks/jdk${{ inputs.version }}" ]; then
53 | OWNER=${{ github.repository_owner }}
54 | REPO=${{ github.event.repository.name }}
55 | BRANCH=${{ github.ref_name }}
56 | WORKFLOW="cache_java.yml"
57 |
58 | URL="https://github.com/$OWNER/$REPO/actions/workflows/$WORKFLOW"
59 |
60 | echo "### ⚠️ JDK Cache Miss Detected" >> $GITHUB_STEP_SUMMARY
61 | echo "🛠️ [Click here and select ${BRANCH} branch to manually refresh the cache](<$URL>)" >> $GITHUB_STEP_SUMMARY
62 | exit 1
63 | fi
64 | - name: Setup Environment
65 | shell: bash
66 | run: |
67 | chmod a+rx -R jdks
68 | echo "Setting up JDK ${{ inputs.version }} [${{ inputs.arch }}]"
69 | JAVA_HOME=$(pwd)/jdks/${{ steps.infer_build_jdk.outputs.build_jdk }}
70 | JAVA_TEST_HOME=$(pwd)/jdks/jdk${{ inputs.version }}
71 | PATH=$JAVA_HOME/bin:$PATH
72 | echo "JAVA_HOME=$JAVA_HOME" >> $GITHUB_ENV
73 | echo "JAVA_TEST_HOME=$JAVA_TEST_HOME" >> $GITHUB_ENV
74 | echo "PATH=$JAVA_HOME/bin:$PATH" >> $GITHUB_ENV
75 |
--------------------------------------------------------------------------------
/.github/scripts/cppcheck-suppressions.txt:
--------------------------------------------------------------------------------
1 | cstyleCast
2 | constParameter
3 | obsoleteFunctions:flightRecorder.cpp
4 | obsoleteFunctionsalloca:flightRecorder.cpp
--------------------------------------------------------------------------------
/.github/scripts/java_setup.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function prepareJdk() {
4 | local variant=$1
5 | local arch=$2
6 | local version=${variant%%-*}
7 | local qualifier=${variant#*-}
8 |
9 | local target_path="${GITHUB_WORKSPACE}/${JDKS_DIR}/jdk${variant}"
10 |
11 | mkdir -p ${target_path}
12 |
13 | if [[ ${qualifier} == "librca" ]] && [[ "${arch}" =~ "-musl" ]]; then
14 | local osarch="${arch%-musl}"
15 | local suffix=
16 | if [[ "${osarch}" == "aarch64" ]]; then
17 | suffix="AARCH64_"
18 | fi
19 | URL_VAR="JAVA_${version}_MUSL_${suffix}URL"
20 | URL="${!URL_VAR}"
21 | if [[ -z "${URL}" ]]; then
22 | echo "Musl/Liberica JDK URL not found for ${arch}/${variant}"
23 | exit 1
24 | fi
25 | curl -L --fail "${URL}" | tar -xvzf - -C ${target_path} --strip-components 1
26 | return
27 | fi
28 |
29 | if [[ ${qualifier} == "orcl" ]]; then
30 | if [[ ${version} == "8" ]]; then
31 | mkdir -p "${target_path}"
32 | curl -L --fail "${JAVA_8_ORACLE_URL}" | sudo tar -xvzf - -C ${target_path} --strip-components 1
33 | return
34 | else
35 | echo "Oracle JDK 8 only!"
36 | exit 1
37 | fi
38 | fi
39 |
40 | if [[ ${qualifier} == "ibm" ]]; then
41 | if [[ ${version} == "8" ]]; then
42 | mkdir -p "${target_path}"
43 | curl -L --fail "${JAVA_8_IBM_URL}" | sudo tar -xvzf - -C ${target_path} --strip-components 2
44 | return
45 | else
46 | echo "IBM JDK 8 only!"
47 | exit 1
48 | fi
49 | fi
50 |
51 | if [[ ${qualifier} == "zing" ]]; then
52 | URL_VAR="JAVA_${version}_ZING_URL"
53 | if [[ "${arch}" == "aarch64" ]]; then
54 | URL_VAR="JAVA_${version}_ZING_AARCH64_URL"
55 | fi
56 |
57 | URL="${!URL_VAR}"
58 | if [[ -z "${URL}" ]]; then
59 | echo "Zing JDK URL not found for ${variant}"
60 | exit 1
61 | fi
62 | curl -L --fail "${URL}" | sudo tar -xvzf - -C ${target_path} --strip-components 1
63 | if [[ "${arch}" != "aarch64" ]]; then
64 | # rename the bundled libstdc++.so to avoid conflicts with the system one
65 | sudo mv ${target_path}/etc/libc++/libstdc++.so.6 ${target_path}/etc/libc++/libstdc++.so.6.bak
66 | fi
67 | return
68 | fi
69 |
70 | # below the installation of the SDKMAN-managed JDK
71 | source ~/.sdkman/bin/sdkman-init.sh
72 |
73 | local suffix="tem"
74 | local versionVar="JAVA_${version}_VERSION"
75 | if [[ "${qualifier}" == "j9" ]]; then
76 | suffix="sem"
77 | versionVar="JAVA_${version}_J9_VERSION"
78 | elif [[ "${qualifier}" == "graal" ]]; then
79 | suffix="graal"
80 | versionVar="JAVA_${version}_GRAAL_VERSION"
81 | fi
82 |
83 | local distro_base
84 | distro_base="${!versionVar}"
85 | local jdk_distro="${distro_base}-${suffix}"
86 |
87 | echo 'n' | sdk install java ${jdk_distro} > /dev/null
88 |
89 | rm -rf ${target_path}
90 | mkdir -p "$(dirname ${target_path})"
91 | mv ${SDKMAN_DIR}/candidates/java/${jdk_distro} ${target_path}
92 | }
93 |
--------------------------------------------------------------------------------
/.github/scripts/prepare_reports.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 | mkdir -p reports
5 | cp /tmp/hs_err* reports/ || true
6 | cp ddprof-test/javacore*.txt reports/ || true
7 | cp ddprof-test/build/hs_err* reports/ || true
8 | cp -r ddprof-lib/build/tmp reports/native_build || true
9 | cp -r ddprof-test/build/reports/tests reports/tests || true
10 | cp -r /tmp/recordings reports/recordings || true
11 | find ddprof-lib/build -name 'libjavaProfiler.*' -exec cp {} reports/ \; || true
12 |
--------------------------------------------------------------------------------
/.github/scripts/python_utils.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from bs4 import BeautifulSoup
3 |
4 | def remove_tags(soup, tags_to_remove):
5 | for tag in tags_to_remove:
6 | for element in soup.find_all(tag):
7 | element.decompose()
8 |
9 | def create_scanbuild_code_links(soup, target_branch):
10 | target = None
11 | for element in soup.find_all("td"):
12 | clz = element.get('class')
13 | if clz is None:
14 | src = element.text
15 | target = element
16 | elif 'Q' in clz and target is not None and target.text != 'Function/Method':
17 | line = element.text
18 | link = soup.new_tag('a', href=f'https://github.com/DataDog/java-profiler/blob/{target_branch}/ddprof-lib/src/main/cpp/{src}#L{line}')
19 | link.string = src
20 | target.clear()
21 | target.append(link)
22 | target = None
23 | def parse_table(table):
24 | markdown_table = []
25 | for row in table.find_all('tr'):
26 | cells = row.find_all(['th', 'td'])
27 | markdown_cells = [cell.get_text(strip=True) for cell in cells]
28 | markdown_table.append('| ' + ' | '.join(markdown_cells) + ' |')
29 | return '\n'.join(markdown_table)
30 |
31 | def scanbuild_cleanup(soup, args):
32 | target_branch = args[0]
33 | remove_tags(soup, ["title", "script", "a"])
34 | create_scanbuild_code_links(soup, target_branch)
35 | title = soup.find('h1')
36 | title.string = 'Scan-Build Report'
37 | return str(soup)
38 |
39 | def cppcheck_cleanup(soup, args):
40 | remove_tags(soup, ["title", "style", "head"])
41 | return str(soup)
42 |
43 | def usage(soup, args):
44 | return "Usage"
45 |
46 | if __name__ == "__main__":
47 | actions = {
48 | "scanbuild_cleanup": scanbuild_cleanup,
49 | "cppcheck_cleanup": cppcheck_cleanup,
50 | }
51 | action = actions.get(sys.argv[1], usage)
52 | input_file = sys.argv[2]
53 | args = sys.argv[3:]
54 |
55 | with open(input_file, "r") as file:
56 | html_content = file.read()
57 |
58 | soup = BeautifulSoup(html_content, "html.parser")
59 | print(action(soup, args))
60 |
--------------------------------------------------------------------------------
/.github/scripts/release.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -x
4 | set -e
5 |
6 | TYPE=$1
7 | DRYRUN=$2
8 |
9 | BRANCH=$(git branch --show-current)
10 | RELEASE_BRANCH=
11 |
12 | BASE=$(./gradlew printVersion -Psnapshot=false | grep 'Version:' | cut -f2 -d' ')
13 | # BASE == 0.0.1
14 |
15 | if [ "$TYPE" == "MINOR" ] || [ "$TYPE" == "MAJOR" ]; then
16 | if [ "$BRANCH" != "main" ] && [ -z "$DRYRUN" ]; then
17 | echo "Major or minor release can be performed only from 'main' branch."
18 | exit 1
19 | fi
20 | if [ "$TYPE" == "MAJOR" ]; then
21 | # 0.1.0 -> 1.0.0
22 | ./gradlew incrementVersion --versionIncrementType=MAJOR
23 | BASE=$(./gradlew printVersion -Psnapshot=false | grep 'Version:' | cut -f2 -d' ')
24 | # BASE == 1.0.0
25 | fi
26 | RELEASE_BRANCH="release/${BASE%.*}._"
27 | git tag -f v_$BASE
28 | fi
29 |
30 | if [ "$TYPE" == "PATCH" ]; then
31 | if [[ ! $BRANCH =~ ^release\/[0-9]+\.[0-9]+\._$ ]] && [ -z "$DRYRUN" ]; then
32 | echo "Patch release can be created only for 'release/*' branch."
33 | exit 1
34 | fi
35 | RELEASE_BRANCH="release/${BASE%.*}._"
36 | fi
37 |
38 | if [ "$BRANCH" != "$RELEASE_BRANCH" ]; then
39 | git checkout -b $RELEASE_BRANCH
40 | if ! git diff --quiet; then
41 | git add build.gradle
42 | git commit -m "[Automated] Release ${BASE}"
43 | fi
44 | git push $DRYRUN --atomic --set-upstream origin $RELEASE_BRANCH
45 | git checkout $BRANCH
46 | fi
47 |
48 | if [ "$TYPE" == "MAJOR" ] || [ "$TYPE" == "MINOR" ]; then
49 | ./gradlew incrementVersion --versionIncrementType=MINOR
50 | else
51 | ./gradlew incrementVersion --versionIncrementType=PATCH
52 | fi
53 |
54 | CANDIDATE=$(./gradlew printVersion -Psnapshot=false | grep 'Version:' | cut -f2 -d' ')
55 |
56 | git add build.gradle
57 | git commit -m "[Automated] Bump dev version to ${CANDIDATE}"
58 |
59 | git push $DRYRUN --atomic --set-upstream origin $BRANCH
60 | git push $DRYRUN -f --atomic --tags
61 |
--------------------------------------------------------------------------------
/.github/scripts/test_alpine_aarch64.sh:
--------------------------------------------------------------------------------
1 | #! /bin/sh
2 |
3 | set -e
4 | set +x
5 |
6 | export KEEP_JFRS=true
7 | export TEST_COMMIT="${1}"
8 | export TEST_CONFIGURATION="${2}"
9 | export LIBRARY="musl"
10 | export CONFIG="${3}"
11 | export JAVA_HOME="${4}"
12 | export JAVA_TEST_HOME="${5}"
13 |
14 | export PATH="${JAVA_HOME}/bin":${PATH}
15 |
16 | # due to env hell in GHA containers, we need to re-do the logic from Extract Versions here
17 | JAVA_VERSION=$("${JAVA_TEST_HOME}/bin/java" -version 2>&1 | awk -F '"' '/version/ {
18 | split($2, v, "[._]");
19 | if (v[2] == "") {
20 | # Version is like "24": assume it is major only and add .0.0
21 | printf "%s.0.0\n", v[1]
22 | } else if (v[1] == "1") {
23 | # Java 8 or older: Format is "1.major.minor_update"
24 | printf "%s.%s.%s\n", v[2], v[3], v[4]
25 | } else {
26 | # Java 9 or newer: Format is "major.minor.patch"
27 | printf "%s.%s.%s\n", v[1], v[2], v[3]
28 | }
29 | }')
30 | export JAVA_VERSION
31 |
32 | apk update && apk add curl moreutils wget hexdump linux-headers bash make g++ clang git cppcheck jq cmake gtest-dev gmock tar >/dev/null
33 |
34 | ./gradlew -PCI -PkeepJFRs :ddprof-test:test${CONFIG} --no-daemon --parallel --build-cache --no-watch-fs
--------------------------------------------------------------------------------
/.github/workflows/add-milestone-to-pull-requests.yaml:
--------------------------------------------------------------------------------
1 | name: Add milestone to pull requests
2 | on:
3 | pull_request:
4 | types: [closed]
5 | branches:
6 | - main
7 |
8 | permissions:
9 | contents: read
10 | pull-requests: write
11 | issues: write
12 |
13 | jobs:
14 | add_milestone_to_merged:
15 | if: github.event.pull_request.merged && github.event.pull_request.milestone == null
16 | name: Add milestone to merged pull requests
17 | runs-on: ubuntu-latest
18 | steps:
19 | - name: Get project milestones
20 | id: milestones
21 | uses: actions/github-script@47f7cf65b5ced0830a325f705cad64f2f58dddf7 # 3.1.0
22 | with:
23 | github-token: ${{secrets.GITHUB_TOKEN}}
24 | script: |
25 | const list = await github.issues.listMilestones({
26 | owner: context.repo.owner,
27 | repo: context.repo.repo,
28 | state: 'open'
29 | })
30 | // Need to manually sort because "sort by number" isn't part of the api
31 | // highest number first
32 | const milestones = list.data.sort((a,b) => (b.number - a.number))
33 |
34 | return milestones.length == 0 ? null : milestones[0].number
35 | - name: Update Pull Request
36 | if: steps.milestones.outputs.result != null
37 | uses: actions/github-script@47f7cf65b5ced0830a325f705cad64f2f58dddf7 # 3.1.0
38 | with:
39 | github-token: ${{secrets.GITHUB_TOKEN}}
40 | script: |
41 | // Confusingly, the issues api is used because pull requests are issues
42 | await github.issues.update({
43 | owner: context.repo.owner,
44 | repo: context.repo.repo,
45 | issue_number: ${{ github.event.pull_request.number }},
46 | milestone: ${{ steps.milestones.outputs.result }},
47 | });
48 |
--------------------------------------------------------------------------------
/.github/workflows/approve-trivial.yml:
--------------------------------------------------------------------------------
1 | name: Auto-Approve Trivial PRs
2 |
3 | on:
4 | pull_request_target:
5 | types: [labeled]
6 |
7 | permissions:
8 | pull-requests: write
9 | contents: read
10 |
11 | jobs:
12 | auto-approve:
13 | if: contains(github.event.pull_request.labels.*.name, 'trivial') || contains(github.event.pull_request.labels.*.name, 'no-review')
14 | runs-on: ubuntu-latest
15 | steps:
16 | - name: Auto-approve PR
17 | uses: hmarr/auto-approve-action@v4
18 | with:
19 | github-token: ${{ secrets.GITHUB_TOKEN }}
20 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI Run
2 |
3 | concurrency:
4 | group: pr-ci_${{ github.event.pull_request.number }}
5 | cancel-in-progress: true
6 |
7 | on:
8 | push:
9 | branches:
10 | - '*'
11 | tags-ignore:
12 | - v*
13 | pull_request:
14 | workflow_dispatch:
15 |
16 | permissions:
17 | contents: read
18 | pull-requests: read
19 | actions: read
20 |
21 | jobs:
22 | check-for-pr:
23 | runs-on: ubuntu-latest
24 | outputs:
25 | skip: ${{ steps.check.outputs.skip }}
26 | steps:
27 | - name: Check if PR exists
28 | id: check
29 | env:
30 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
31 | HEAD_REF: ${{ github.head_ref }}
32 | run: |
33 | if [ -z "${{ github.base_ref }}" ]; then
34 | prs=$(gh pr list \
35 | --repo "$GITHUB_REPOSITORY" \
36 | --json baseRefName,headRefName \
37 | --jq '
 38 |               map(select(.baseRefName == "${{ github.base_ref }}" and .headRefName == "$HEAD_REF"))
39 | | length
40 | ')
41 | if ((prs > 0)); then
42 | echo "skip=true" >> "$GITHUB_OUTPUT"
43 | fi
44 | fi
45 | check-formatting:
46 | runs-on: ubuntu-22.04
47 | needs: check-for-pr
48 | if: needs.check-for-pr.outputs.skip != 'true'
49 | steps:
50 | - uses: actions/checkout@v3
51 | - name: Setup OS
52 | run: |
53 | sudo apt-get update
54 | sudo apt-get install -y clang-format-11
55 | # we need this to make sure we are actually using clang-format v. 11
56 | sudo mv /usr/bin/clang-format /usr/bin/clang-format-14
57 | sudo mv /usr/bin/clang-format-11 /usr/bin/clang-format
58 |
59 | - name: Cache Gradle Wrapper Binaries
60 | uses: actions/cache@v4
61 | with:
62 | path: ~/.gradle/wrapper/dists
63 | key: gradle-wrapper-${{ runner.os }}-${{ hashFiles('gradle/wrapper/gradle-wrapper.properties') }}
64 | restore-keys: |
65 | gradle-wrapper-${{ runner.os }}-
66 |
67 | - name: Cache Gradle User Home
68 | uses: actions/cache@v4
69 | with:
70 | path: ~/.gradle/caches
71 | key: gradle-caches-${{ runner.os }}-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
72 | restore-keys: |
73 | gradle-caches-${{ runner.os }}-
74 |
75 | - name: Check
76 | run: |
77 | ./gradlew spotlessCheck --no-daemon --parallel --build-cache --no-watch-fs
78 |
79 | test-matrix:
80 | needs: check-formatting
81 | if: needs.check-for-pr.outputs.skip != 'true'
82 | uses: ./.github/workflows/test_workflow.yml
83 | with:
84 | configuration: '["debug"]'
85 |
86 | gh-release:
87 | if: startsWith(github.event.ref, 'refs/heads/release/')
88 | runs-on: ubuntu-latest
89 | needs: [test-matrix]
90 | steps:
91 | - name: Create Github Release
92 | uses: ./.github/workflows/gh_release.yml@gh-release
93 | with:
94 | release_branch: ${GITHUB_REF_NAME}
95 |
--------------------------------------------------------------------------------
/.github/workflows/create-next-milestone.yaml:
--------------------------------------------------------------------------------
1 | name: Create next milestone
2 | on:
3 | milestone:
4 | types: [closed]
5 |
6 | permissions:
7 | contents: read
8 | issues: write
9 |
10 | jobs:
11 | create_next_milestone:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - name: Get next minor version
15 | id: semvers
16 | uses: WyriHaximus/github-action-next-semvers@33d116a4c239252582a60a1ba8dbba63ad493ffd # 1.1.0
17 | with:
18 | version: ${{ github.event.milestone.title }}
19 | - name: Create next milestone
20 | uses: WyriHaximus/github-action-create-milestone@b86699ba7511fa3b61154ac8675d86b01938fc16 # 1.0.0
21 | with:
22 | title: ${{ steps.semvers.outputs.minor }}
23 | env:
24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25 |
--------------------------------------------------------------------------------
/.github/workflows/gh_release.yml:
--------------------------------------------------------------------------------
1 | name: Github Release
2 | run-name: Release ${{ inputs.release_tag }} ${{ github.event.ref_name }}
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | release_tag:
7 | type: string
8 | description: "Release tag"
9 | required: true
10 | workflow_call:
11 | inputs:
12 | release_tag:
13 | type: string
14 | description: "Release tag"
15 | required: false
16 | push:
17 | tags:
18 | - v_*.*.*
19 |
20 | permissions:
21 | contents: write
22 | actions: read
23 |
24 | jobs:
25 | gh-release:
26 | if: (startsWith(github.event.ref, 'refs/tags/v_') || inputs.release_tag != '') && !endsWith(github.event.ref, '-SNAPSHOT')
27 | runs-on: ubuntu-latest
28 | steps:
29 | - uses: actions/checkout@v3
30 | with:
31 | fetch-depth: 0
32 | - uses: webfactory/ssh-agent@v0.7.0
33 | with:
34 | ssh-private-key: ${{ secrets.SSH_PUSH_SECRET }}
35 | - name: Create Release [automatic]
36 | id: create_release_auto
37 | uses: ncipollo/release-action@v1
38 | if: ${{ startsWith(github.ref, 'refs/tags/') }}
39 | with:
40 | generateReleaseNotes: true
41 | allowUpdates: true
42 | draft: true
43 | env:
44 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
45 | - name: Create Release [manual]
46 | id: create_release_manual
47 | uses: ncipollo/release-action@v1
48 | if: ${{ !startsWith(github.ref, 'refs/tags/') }}
49 | with:
50 | generateReleaseNotes: true
51 | allowUpdates: true
52 | tag: ${{ inputs.release_tag}}
53 | draft: true
54 | env:
55 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
--------------------------------------------------------------------------------
/.github/workflows/increment-milestones-on-tag.yaml:
--------------------------------------------------------------------------------
1 | name: Increment milestones on tag
2 | on:
3 | create
4 |
5 | permissions:
6 | contents: read
7 | issues: write
8 |
9 | jobs:
10 | increment_milestone:
11 | if: github.event.ref_type == 'tag' && github.event.master_branch == 'main'
12 | runs-on: ubuntu-latest
13 | steps:
14 | - name: Get milestone title
15 | id: milestoneTitle
16 | uses: actions/github-script@47f7cf65b5ced0830a325f705cad64f2f58dddf7 # 3.1.0
17 | with:
18 | result-encoding: string
19 | script: |
20 | // Our tags are of the form v_X.X.X and milestones don't have the "v"
21 | return '${{github.event.ref}}'.startsWith('v_') ? '${{github.event.ref}}'.substring(2) : '${{github.event.ref}}';
22 | - name: Get milestone for tag
23 | id: milestone
24 | uses: actions/github-script@47f7cf65b5ced0830a325f705cad64f2f58dddf7 # 3.1.0
25 | with:
26 | github-token: ${{secrets.GITHUB_TOKEN}}
27 | script: |
28 | const milestones = await github.paginate(github.issues.listMilestones, {
29 | owner: context.repo.owner,
30 | repo: context.repo.repo,
31 | state: 'all'
32 | })
33 |
34 | const milestone = milestones.find(milestone => milestone.title == '${{steps.milestoneTitle.outputs.result}}')
35 |
36 | if (milestone) {
37 | return milestone.number
38 | } else {
39 | return null
40 | }
41 | - name: Close milestone
42 | if: fromJSON(steps.milestone.outputs.result)
43 | uses: actions/github-script@47f7cf65b5ced0830a325f705cad64f2f58dddf7 # 3.1.0
44 | with:
45 | github-token: ${{secrets.GITHUB_TOKEN}}
46 | script: |
47 | await github.issues.updateMilestone({
48 | owner: context.repo.owner,
49 | repo: context.repo.repo,
50 | state: 'closed',
51 | milestone_number: ${{steps.milestone.outputs.result}}
52 | })
53 | - name: Get next minor version
54 | if: fromJSON(steps.milestone.outputs.result)
55 | id: semvers
56 | uses: WyriHaximus/github-action-next-semvers@33d116a4c239252582a60a1ba8dbba63ad493ffd # 1.1.0
57 | with:
58 | version: ${{steps.milestoneTitle.outputs.result}}
59 | - name: Create next milestone
60 | if: fromJSON(steps.milestone.outputs.result)
61 | uses: WyriHaximus/github-action-create-milestone@b86699ba7511fa3b61154ac8675d86b01938fc16 # 1.0.0
62 | with:
63 | title: ${{ steps.semvers.outputs.minor }}
64 | env:
65 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
66 |
--------------------------------------------------------------------------------
/.github/workflows/nightly.yml:
--------------------------------------------------------------------------------
1 | name: Nightly Sanitized Run
2 |
3 | on:
4 | schedule:
5 | # Runs every day at 03:00 UTC
6 | - cron: '0 3 * * *'
7 | workflow_dispatch:
8 |
9 | permissions:
10 | contents: read
11 | actions: read
12 |
13 | jobs:
14 | run-test:
15 | uses: ./.github/workflows/test_workflow.yml
16 | with:
17 | configuration: '["asan"]' # Ignoring tsan for now '["asan", "tsan"]'
18 | report-failures:
19 | runs-on: ubuntu-latest
20 | needs: run-test
21 | if: failure()
22 | steps:
23 | - name: Download failed tests artifact
24 | uses: actions/download-artifact@v4
25 | with:
26 | name: failures
27 | path: ./artifacts
28 | - name: Report failures
29 | run: |
30 | find ./artifacts -name 'failures_*' -exec cat {} \; > ./artifacts/failures.txt
31 | scenarios=$(cat ./artifacts/failures.txt | tr '\n' ',')
32 |
33 | echo "Failed scenarios: $scenarios"
34 |
35 | curl -X POST "${{ secrets.SLACK_WEBHOOK }}" \
36 | -H 'Content-Type: application/json' \
37 | -d "{'scenarios': '${scenarios}', 'failed_run_url': '${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'}"
38 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Automated Release
  2 | run-name: "${{ inputs.dry_run && 'Dry-run for ' || 'Perform ' }} ${{ inputs.release_type }} release of ${{ github.ref }} branch"
3 |
4 | on:
5 | workflow_dispatch:
6 | inputs:
7 | release_type:
8 | type: choice
9 | description: The release type
10 | options:
11 | - "major"
12 | - "minor"
13 | - "patch"
14 | default: "minor"
15 | dry_run:
16 | description: Perform the release dry-run
17 | required: true
18 | type: boolean
19 | default: true
20 |
21 | permissions:
22 | contents: write
23 | actions: read
24 |
25 | jobs:
26 | release-branch:
27 | runs-on: ubuntu-latest
28 | steps:
29 | - name: Output Inputs
30 | run: |
31 | echo "${{ toJSON(github.event.inputs) }}"
32 | echo "${{ toJSON(inputs) }}"
33 | echo "${{ inputs.release_type }}"
34 | - uses: webfactory/ssh-agent@v0.7.0
35 | with:
36 | ssh-private-key: ${{ secrets.SSH_PUSH_SECRET }}
37 | - name: Checkout ${{ env.GITHUB_REPOSITORY }}
38 | run: git clone git@github.com:$GITHUB_REPOSITORY.git java-profiler
39 | - name: Configure git env
40 | run: |
41 | git config --global user.email "java-profiler@datadoghq.com"
42 | git config --global user.name "Datadog Java Profiler"
43 | - name: Create release
44 | env:
45 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
46 | run: |
47 | if [ "${{ inputs.dry_run }}" != "true" ]; then
48 | if [ "${{ inputs.release_type }}" != "patch" ]&& [[ ${GITHUB_REF_NAME} =~ release/.* ]]; then
49 | echo "::error Only patch release can be performed from a release branch"
50 | exit 1
51 | fi
52 | if [ "${{ inputs.release_type }}" == "patch" ]&& [[ ! ${GITHUB_REF_NAME} =~ release/.* ]]; then
53 | echo "::error::A patch release can be performed only from a release branch"
54 | exit 1
55 | fi
56 | if [ "${{ inputs.release_type }}" != "patch" ]&& [[ ! ${GITHUB_REF_NAME} =~ main ]]; then
57 | echo "::error::A major or minor release can be performed only from 'main' branch"
58 | exit 1
59 | fi
60 | else
61 | DRY_RUN="--dry-run"
62 | fi
63 |
64 | TYPE="${{ inputs.release_type }}"
65 | cd java-profiler
66 | git checkout $GITHUB_REF_NAME
67 | ./.github/scripts/release.sh ${TYPE^^} $DRY_RUN
--------------------------------------------------------------------------------
/.github/workflows/update_assets.yml:
--------------------------------------------------------------------------------
1 | name: Update Release Assets
2 | run-name: Update assets for ${{ inputs.release_tag }}
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | release_tag:
7 | type: string
8 | description: "Release tag"
9 | required: true
10 | push:
11 | tags:
12 | - v_*.*.*
13 |
14 | permissions:
15 | contents: write
16 | actions: read
17 |
18 | jobs:
 19 |   update-assets-and-release:
20 | if: (startsWith(github.event.ref, 'refs/tags/v_') || inputs.release_tag != '') && !endsWith(github.event.ref, '-SNAPSHOT')
21 | runs-on: ubuntu-latest
22 | steps:
23 | - name: Setup System
24 | id: setup-system
25 | run: |
26 | sudo apt update && sudo apt install -y wget unzip
27 | - name: Download Assets
28 | id: download-assets
29 | timeout-minutes: 30
30 | run: |
31 | # ignore errors to allow reattempted downloads
32 | set +e
33 | TAG=${{ inputs.release_tag }}
34 | if [ -z "$TAG" ]; then
35 | TAG="$GITHUB_REF_NAME"
36 | fi
37 | VERSION=$(echo "${TAG}" | sed -e 's/v_//g')
38 | ASSET_URL="https://oss.sonatype.org/service/local/repositories/releases/content/com/datadoghq/ddprof/${VERSION}/ddprof-${VERSION}.jar"
39 | RESULT=1
40 | while [ $RESULT -ne 0 ]; do
41 | wget -q $ASSET_URL
42 | RESULT=$?
43 | if [ $RESULT -ne 0 ]; then
44 | echo "Artifact not available. Retrying in 30 seconds."
45 | sleep 30
46 | fi
47 | done
48 | echo "VERSION=${VERSION}" >> $GITHUB_ENV
49 | - name: Prepare Assets
50 | id: prepare-assets
51 | run: |
52 | LIB_BASE_DIR="META-INF/native-libs"
53 | mkdir assets
54 | cp ddprof-${VERSION}.jar assets/ddprof.jar
55 | cp ddprof-${VERSION}.jar assets/ddprof-${VERSION}.jar
56 | unzip ddprof-${VERSION}.jar
57 | mv ${LIB_BASE_DIR}/linux-arm64/libjavaProfiler.so assets/libjavaProfiler_linux-arm64.so
58 | mv ${LIB_BASE_DIR}/linux-x64/libjavaProfiler.so assets/libjavaProfiler_linux-x64.so
59 | mv ${LIB_BASE_DIR}/linux-arm64-musl/libjavaProfiler.so assets/libjavaProfiler_linux-arm64-musl.so
60 | mv ${LIB_BASE_DIR}/linux-x64-musl/libjavaProfiler.so assets/libjavaProfiler_linux-x64-musl.so
61 | - name: Update release ${{ inputs.release_tag }}
62 | id: update-release
63 | uses: ncipollo/release-action@v1
64 | with:
65 | token: ${{ secrets.GITHUB_TOKEN }}
66 | tag: "v_${{ env.VERSION }}"
67 | allowUpdates: true
68 | generateReleaseNotes: true
69 | omitBodyDuringUpdate: true
70 | artifacts: assets/ddprof*.jar,assets/*.so
71 | draft: false
72 | makeLatest: true
73 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | **/build/
2 | **/build_*/
3 | **/build-*/
4 | /nbproject/
5 | /out/
6 | /.idea/
7 | /target/
8 | **/*.class
9 | **/*.class.h
10 | **/*.so
11 | **/*.o
12 | .vscode
13 | .classpath
14 | .project
15 | .settings
16 | .gradle
17 | .tmp
18 | *.iml
19 | /ddprof-stresstest/jmh-result.*
20 | /ddprof-lib/src/main/cpp-external/**/*
21 |
22 | **/.resources/
23 |
24 | # ignore all temporary locations related to maven builds
25 | datadog/maven/tmp
26 | datadog/maven/repository
27 | datadog/maven/resources
28 |
29 | **/harness*
30 | **/launcher*
31 | /gradle.properties
32 | **/hs_err*
33 |
34 | # cursor AI history
35 | .history
36 |
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | # Triggers a build within the Datadog infrastructure in the ddprof-build repository
2 | trigger_internal_build:
3 | rules:
4 | - if: $CI_COMMIT_BRANCH =~ /release\/.*/
5 | when: never
6 | - when: always
7 | allow_failure: false
8 | variables:
9 | DOWNSTREAM_BRANCH: "main"
10 | DDPROF_DEFAULT_BRANCH: "main"
11 | DDPROF_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
12 | DDROF_COMMIT_SHA: ${CI_COMMIT_SHA}
13 | DPROF_SHORT_COMMIT_SHA: ${CI_COMMIT_SHORT_SHA}
14 | DDPROF_COMMIT_TAG: ${CI_COMMIT_TAG}
15 | trigger:
16 | project: DataDog/apm-reliability/async-profiler-build
17 | strategy: depend
18 | branch: $DOWNSTREAM_BRANCH
19 | forward:
20 | pipeline_variables: true
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | !TODO!
3 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | buildscript {
2 | dependencies {
3 | classpath("com.dipien:semantic-version-gradle-plugin:2.0.0")
4 | }
5 | repositories {
6 | mavenLocal()
7 | mavenCentral()
8 | gradlePluginPortal()
9 | }
10 | }
11 |
12 | plugins {
13 | id 'io.github.gradle-nexus.publish-plugin' version '2.0.0'
14 | id "com.diffplug.spotless" version "6.11.0"
15 | }
16 |
17 | version = "1.28.0"
18 |
19 | apply plugin: "com.dipien.semantic-version"
20 | version = project.findProperty("ddprof_version") ?: version
21 |
22 | allprojects {
23 | repositories {
24 | mavenCentral()
25 | mavenCentral()
26 | gradlePluginPortal()
27 | }
28 | }
29 |
30 | repositories {
31 | mavenLocal()
32 | mavenCentral()
33 | gradlePluginPortal()
34 |
35 | maven {
36 | content {
37 | includeGroup "com.datadoghq"
38 | }
39 | mavenContent {
40 | snapshotsOnly()
41 | }
42 | url 'https://oss.sonatype.org/content/repositories/snapshots/'
43 | }
44 | }
45 |
46 | allprojects {
47 | group = 'com.datadoghq'
48 |
49 | apply from: rootProject.file('common.gradle')
50 | apply from: rootProject.file('gradle/configurations.gradle')
51 | apply from: "$rootDir/gradle/spotless.gradle"
52 | }
53 |
54 | subprojects {
55 | version = rootProject.version
56 | }
57 |
58 | apply from: rootProject.file('common.gradle')
59 | apply from: rootProject.file('gradle/configurations.gradle')
60 |
61 | def isCI = System.getenv("CI") != null
62 |
63 | nexusPublishing {
64 | repositories {
65 | def forceLocal = project.hasProperty('forceLocal')
66 |
67 | if (forceLocal && !isCI) {
68 | local {
69 | // For testing use with https://hub.docker.com/r/sonatype/nexus
70 | // docker run --rm -d -p 8081:8081 --name nexus sonatype/nexus
71 | // Doesn't work for testing releases though... (due to staging)
72 | nexusUrl = uri("http://localhost:8081/nexus/content/repositories/releases/")
73 | snapshotRepositoryUrl = uri("http://localhost:8081/nexus/content/repositories/snapshots/")
74 | username = "admin"
75 | password = "admin123"
76 | }
77 | } else {
78 | sonatype {
79 | username = System.getenv("SONATYPE_USERNAME")
80 | password = System.getenv("SONATYPE_PASSWORD")
81 | }
82 | }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/ddprof-lib/benchmarks/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'cpp-application'
3 | }
4 |
5 | // this feels weird but it is the only way invoking `./gradlew :ddprof-lib:*` tasks will work
6 | if (rootDir.toString().endsWith("ddprof-lib/gradle")) {
7 | apply from: rootProject.file('../../common.gradle')
8 | }
9 |
10 | application {
11 | baseName = "unwind_failures_benchmark"
12 | source.from file('src')
13 | privateHeaders.from file('src')
14 |
15 | targetMachines = [machines.macOS, machines.linux.x86_64]
16 | }
17 |
18 | // Include the main library headers
19 | tasks.withType(CppCompile).configureEach {
20 | includes file('../src/main/cpp').toString()
21 | }
22 |
23 | // Add a task to run the benchmark
24 | tasks.register('runBenchmark', Exec) {
25 | dependsOn 'assemble'
26 | workingDir = buildDir
27 |
28 | doFirst {
29 | // Find the executable by looking for it in the build directory
30 | def executableName = "unwind_failures_benchmark"
31 | def executable = null
32 |
33 | // Search for the executable in the build directory
34 | buildDir.eachFileRecurse { file ->
35 | if (file.isFile() && file.name == executableName && file.canExecute()) {
36 | executable = file
37 | return true // Stop searching once found
38 | }
39 | }
40 |
41 | if (executable == null) {
42 | throw new GradleException("Executable '${executableName}' not found in ${buildDir.absolutePath}. Make sure the build was successful.")
43 | }
44 |
45 | // Build command line with the executable path and any additional arguments
46 | def cmd = [executable.absolutePath]
47 |
48 | // Add any additional arguments passed to the Gradle task
49 | if (project.hasProperty('args')) {
50 | cmd.addAll(project.args.split(' '))
51 | }
52 |
53 | println "Running benchmark using executable at: ${executable.absolutePath}"
54 | commandLine = cmd
55 | }
56 |
57 | doLast {
58 | println "Benchmark completed."
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/ddprof-lib/benchmarks/build_run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -euo pipefail
4 |
5 | HERE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
6 | cd "${HERE}/.."
7 |
8 | # Build and run the benchmark using Gradle
9 | ./gradlew :ddprof-lib:benchmarks:runBenchmark
10 |
--------------------------------------------------------------------------------
/ddprof-lib/benchmarks/src/benchmarkConfig.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
  3 | #include <string>
4 | #include
5 |
6 | struct BenchmarkResult {
7 | std::string name;
8 | long long total_time_ns;
9 | int iterations;
10 | double avg_time_ns;
11 | };
12 |
13 | struct BenchmarkConfig {
14 | int warmup_iterations;
15 | int measurement_iterations;
16 | std::string csv_file;
17 | std::string json_file;
18 | bool debug;
19 |
20 | BenchmarkConfig() : warmup_iterations(100000), measurement_iterations(1000000), debug(false) {
21 | }
22 | };
23 |
--------------------------------------------------------------------------------
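The two structs above only carry benchmark settings and results; as a rough usage sketch (not from the repository; `measure_once` stands in for whatever workload the benchmark exercises), a driver loop built on them might look like:

// Hypothetical benchmark driver using BenchmarkConfig/BenchmarkResult.
#include "benchmarkConfig.h"
#include <chrono>
#include <string>

static void measure_once() { /* workload under test */ }

static BenchmarkResult runBenchmark(const BenchmarkConfig& cfg, const std::string& name) {
    for (int i = 0; i < cfg.warmup_iterations; i++) {
        measure_once();                                   // warm-up, results discarded
    }
    auto start = std::chrono::steady_clock::now();
    for (int i = 0; i < cfg.measurement_iterations; i++) {
        measure_once();
    }
    auto end = std::chrono::steady_clock::now();

    BenchmarkResult r;
    r.name = name;
    r.iterations = cfg.measurement_iterations;
    r.total_time_ns = std::chrono::duration_cast<std::chrono::nanoseconds>(end - start).count();
    r.avg_time_ns = static_cast<double>(r.total_time_ns) / r.iterations;
    return r;
}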
/ddprof-lib/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = "JavaProfiler"
2 |
3 | include ':ddprof-lib:benchmarks'
4 |
--------------------------------------------------------------------------------
/ddprof-lib/src/main/cpp/arch_dd.h:
--------------------------------------------------------------------------------
1 | #ifndef _ARCH_DD_H
2 | #define _ARCH_DD_H
3 |
4 | #include "arch.h"
5 |
  6 | #include <stddef.h>
7 |
8 | static inline long long atomicInc(volatile long long &var,
9 | long long increment = 1) {
10 | return __sync_fetch_and_add(&var, increment);
11 | }
12 |
13 | static inline u64 loadAcquire(volatile u64 &var) {
14 | return __atomic_load_n(&var, __ATOMIC_ACQUIRE);
15 | }
16 |
17 | static inline size_t loadAcquire(volatile size_t &var) {
18 | return __atomic_load_n(&var, __ATOMIC_ACQUIRE);
19 | }
20 |
21 | static inline void storeRelease(volatile long long &var, long long value) {
22 | return __atomic_store_n(&var, value, __ATOMIC_RELEASE);
23 | }
24 |
25 | static inline void storeRelease(volatile size_t &var, size_t value) {
26 | return __atomic_store_n(&var, value, __ATOMIC_RELEASE);
27 | }
28 |
29 | #endif // _ARCH_DD_H
30 |
--------------------------------------------------------------------------------
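The helpers in arch_dd.h wrap GCC atomic builtins so counters can be updated from signal handlers and read from other threads without locks. A minimal, illustrative sketch of how they can be combined (the functions and globals below are hypothetical, not part of the sources):

// Illustrative use of the arch_dd.h helpers: a counter bumped from the
// sampling signal handler and published/read with release/acquire semantics.
#include "arch_dd.h"

static volatile long long g_samples = 0;  // written from the signal handler
static volatile size_t g_dropped = 0;     // single writer, release-published

void onSample(bool dropped) {
    atomicInc(g_samples);                 // atomic fetch-and-add
    if (dropped) {
        // single-writer update made visible to readers via release store
        storeRelease(g_dropped, g_dropped + 1);
    }
}

size_t droppedSoFar() {
    return loadAcquire(g_dropped);        // pairs with the storeRelease above
}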
/ddprof-lib/src/main/cpp/asyncSampleMutex.h:
--------------------------------------------------------------------------------
1 | #ifndef ASYNCSAMPLEMUTEX_H
2 | #define ASYNCSAMPLEMUTEX_H
3 |
4 | #include "threadLocalData.h"
5 |
6 | // controls access to AGCT
7 | class AsyncSampleMutex {
8 | private:
9 | ThreadLocalData *_threadLocalData;
10 | bool _acquired;
11 |
12 | bool try_acquire() {
13 | if (_threadLocalData != nullptr && !_threadLocalData->is_unwinding_Java()) {
14 | _threadLocalData->set_unwinding_Java(true);
15 | return true;
16 | }
17 | return false;
18 | }
19 |
20 | public:
21 | AsyncSampleMutex(ThreadLocalData *threadLocalData)
22 | : _threadLocalData(threadLocalData) {
23 | _acquired = try_acquire();
24 | }
25 |
26 | AsyncSampleMutex(AsyncSampleMutex &other) = delete;
27 |
28 | ~AsyncSampleMutex() {
29 | if (_acquired) {
30 | _threadLocalData->set_unwinding_Java(false);
31 | }
32 | }
33 |
34 | bool acquired() { return _acquired; }
35 | };
36 |
37 | #endif // ASYNCSAMPLEMUTEX_H
38 |
--------------------------------------------------------------------------------
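AsyncSampleMutex is a per-thread RAII guard: the constructor tries to set the thread's "unwinding Java" flag and the destructor clears it, so a re-entrant sample on the same thread is rejected instead of walking the stack twice. A minimal usage sketch, assuming the caller already has the thread's ThreadLocalData (the stack-walk call is hypothetical):

// Illustrative pattern for guarding an AsyncGetCallTrace-style stack walk.
#include "asyncSampleMutex.h"
#include "threadLocalData.h"

void sampleCurrentThread(ThreadLocalData* tld) {
    AsyncSampleMutex mutex(tld);   // tries to mark this thread as unwinding Java
    if (!mutex.acquired()) {
        return;                    // another sample is already unwinding on this thread
    }
    // walkJavaStack();            // hypothetical: safe to unwind here
}                                  // destructor clears the flag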
/ddprof-lib/src/main/cpp/callTraceStorage.h:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Andrei Pangin
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | #ifndef _CALLTRACESTORAGE_H
18 | #define _CALLTRACESTORAGE_H
19 |
20 | #include "arch_dd.h"
21 | #include "linearAllocator.h"
22 | #include "spinLock.h"
23 | #include "vmEntry.h"
24 | #include