├── .github ├── CODEOWNERS ├── PULL_REQUEST_TEMPLATE │ └── new_release.md ├── pull_request_template.md └── workflows │ ├── audit.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .gitmodules ├── CHANGELOG.md ├── Cargo.lock ├── Cargo.toml ├── Dockerfile.proxy ├── Dockerfile.release ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── RELEASING.md ├── am.toml ├── am ├── Cargo.toml ├── build.rs └── src │ ├── commands.rs │ ├── commands │ ├── explore.rs │ ├── init.rs │ ├── instrument.rs │ ├── list.rs │ ├── proxy.rs │ ├── start.rs │ ├── system.rs │ ├── system │ │ └── prune.rs │ └── update.rs │ ├── dir.rs │ ├── downloader.rs │ ├── interactive.rs │ ├── main.rs │ ├── server.rs │ ├── server │ ├── explorer.rs │ ├── functions.rs │ ├── prometheus.rs │ ├── pushgateway.rs │ └── util.rs │ └── terminal.rs ├── am_list ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Cargo.toml ├── README.md ├── assets │ └── contributing │ │ └── ts-playground.png ├── runtime │ └── queries │ │ ├── README.md │ │ ├── go │ │ ├── all_functions.scm │ │ └── autometrics.scm │ │ ├── python │ │ ├── all_functions.scm │ │ ├── autometrics.scm.tpl │ │ └── import.scm │ │ ├── rust │ │ ├── all_functions.scm │ │ ├── am_struct.scm.tpl │ │ └── autometrics.scm │ │ └── typescript │ │ ├── all_functions.scm │ │ ├── autometrics.scm │ │ ├── imports_map.scm │ │ ├── wrapper_call.scm.tpl │ │ └── wrapper_direct_call.scm.tpl └── src │ ├── go.rs │ ├── go │ ├── queries.rs │ └── tests.rs │ ├── lib.rs │ ├── python.rs │ ├── python │ ├── queries.rs │ └── tests.rs │ ├── roots.rs │ ├── rust.rs │ ├── rust │ ├── queries.rs │ └── tests.rs │ ├── typescript.rs │ └── typescript │ ├── imports.rs │ ├── queries.rs │ └── tests.rs ├── assets ├── am-explorer.png └── explorer.png ├── autometrics-am ├── Cargo.toml └── src │ ├── config.rs │ ├── lib.rs │ ├── parser.rs │ └── prometheus.rs ├── docs └── container.md └── files └── explorer ├── graph.html └── index.html /.github/CODEOWNERS: 
-------------------------------------------------------------------------------- 1 | * @autometrics-dev/rust-maintainers 2 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/new_release.md: -------------------------------------------------------------------------------- 1 | # Checklist 2 | 3 | - [ ] Update version in Cargo.toml and Cargo.lock 4 | - [ ] Set version in the CHANGELOG.md 5 | 6 | # Post release checklist 7 | 8 | - [ ] Verify new version are added to the GitHub release 9 | - [ ] Verify new [homebrew repository](https://github.com/autometrics-dev/homebrew-tap) was updated 10 | - [ ] Review and merge documentation PR (https://github.com/autometrics-dev/docs/pulls) 11 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Checklist 2 | 3 | - [ ] Changelog updated 4 | -------------------------------------------------------------------------------- /.github/workflows/audit.yml: -------------------------------------------------------------------------------- 1 | # "Audit your Rust dependencies using cargo audit and the RustSec Advisory DB." 
2 | --- 3 | name: "Audit Dependencies" 4 | on: 5 | push: 6 | paths: 7 | # Run if workflow changes 8 | - ".github/workflows/audit.yml" 9 | # Run on changed dependencies 10 | - "**/Cargo.toml" 11 | - "**/Cargo.lock" 12 | # Run if the configuration file changes 13 | - "**/audit.toml" 14 | # Rerun periodicly to pick up new advisories 15 | schedule: 16 | - cron: "0 0 * * *" 17 | # Run manually 18 | workflow_dispatch: 19 | 20 | permissions: read-all 21 | 22 | jobs: 23 | audit: 24 | runs-on: ubuntu-latest 25 | permissions: 26 | issues: write 27 | steps: 28 | - uses: actions/checkout@v3 29 | with: 30 | submodules: recursive 31 | - uses: actions-rust-lang/audit@v1 32 | name: Audit Rust Dependencies 33 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | on: 2 | release: 3 | types: 4 | - published 5 | workflow_dispatch: 6 | 7 | name: Release new version 8 | jobs: 9 | validate-version: 10 | name: Validate `Cargo.toml` version matches tag 11 | runs-on: ubuntu-latest 12 | outputs: 13 | version: ${{ steps.validate.outputs.version }} 14 | steps: 15 | - uses: actions/checkout@v3 16 | with: 17 | submodules: recursive 18 | 19 | - name: Validate version 20 | id: validate 21 | continue-on-error: ${{ github.event_name == 'workflow_dispatch' }} 22 | run: | 23 | # Extract the version from the Cargo.toml 24 | VERSION=$(cat "Cargo.toml" | grep '^version =' | awk '{ split($0,version,"=") ; gsub(/[\ \"]/, "", version[2]) ; print version[2] }') 25 | echo version=$VERSION >> "$GITHUB_OUTPUT" 26 | echo "Cargo.toml version: \`$VERSION\`" >> $GITHUB_STEP_SUMMARY 27 | if [ "v${VERSION}" != "${{ github.event.release.tag_name }}" ]; then 28 | echo "::error file=Cargo.toml::Version set in Cargo.toml (v${VERSION}) does not match release version (${{ github.event.release.tag_name }})" 29 | exit 1 30 | fi 31 | 32 | build-artifacts: 33 | name: Build (${{ 
matrix.platform.name }}) 34 | needs: validate-version 35 | runs-on: ${{ matrix.platform.os }} 36 | strategy: 37 | matrix: 38 | platform: 39 | - name: linux_aarch64 40 | os: ubuntu-latest 41 | target: aarch64-unknown-linux-gnu 42 | bin: am 43 | file-name: am-linux-aarch64 44 | - name: linux_x86_64 45 | os: ubuntu-latest 46 | target: x86_64-unknown-linux-gnu 47 | bin: am 48 | file-name: am-linux-x86_64 49 | - name: macos_aarch64 50 | os: macOS-latest 51 | target: aarch64-apple-darwin 52 | bin: am 53 | file-name: am-macos-aarch64 54 | - name: macos_x86_64 55 | os: macOS-latest 56 | target: x86_64-apple-darwin 57 | bin: am 58 | file-name: am-macos-x86_64 59 | steps: 60 | - uses: actions/checkout@v3 61 | with: 62 | submodules: recursive 63 | 64 | - name: Build 65 | uses: houseabsolute/actions-rust-cross@v0 66 | with: 67 | target: ${{ matrix.platform.target }} 68 | args: "--locked --release" 69 | strip: true 70 | 71 | - name: Upload binary 72 | uses: actions/upload-artifact@v3 73 | with: 74 | name: ${{ matrix.platform.file-name }} 75 | path: "target/${{ matrix.platform.target }}/release/${{ matrix.platform.bin }}" 76 | 77 | publish-artifacts-docker: 78 | name: Build and publish multi-arch Docker image 79 | runs-on: ubuntu-latest 80 | needs: [build-artifacts, validate-version] 81 | steps: 82 | - uses: actions/checkout@v3 83 | with: 84 | submodules: recursive 85 | 86 | - name: Download am artifacts 87 | uses: actions/download-artifact@v3 88 | with: 89 | path: artifacts 90 | 91 | - name: Prepare files 92 | run: | 93 | mkdir -p build/linux/{amd64,arm64}/ 94 | mv artifacts/am-linux-x86_64/am build/linux/amd64/am 95 | mv artifacts/am-linux-aarch64/am build/linux/arm64/am 96 | chmod u+x build/linux/{amd64,arm64}/am 97 | 98 | - name: Set up QEMU 99 | uses: docker/setup-qemu-action@v2 100 | 101 | - name: Set up Docker Buildx 102 | uses: docker/setup-buildx-action@v2 103 | 104 | - name: Login to Docker Hub 105 | uses: docker/login-action@v2 106 | with: 107 | username: ${{ 
secrets.DOCKERHUB_USERNAME }} 108 | password: ${{ secrets.DOCKERHUB_TOKEN }} 109 | 110 | - name: Build and push to Docker Hub 111 | uses: docker/build-push-action@v4 112 | with: 113 | file: Dockerfile.release 114 | context: build 115 | platforms: linux/amd64,linux/arm64 116 | push: true 117 | tags: | 118 | autometrics/am:v${{ needs.validate-version.outputs.version }} 119 | autometrics/am:latest 120 | cache-from: type=gha 121 | cache-to: type=gha,mode=max 122 | 123 | - name: Build and push to Docker Hub 124 | uses: docker/build-push-action@v4 125 | with: 126 | file: Dockerfile.proxy 127 | context: build 128 | platforms: linux/amd64,linux/arm64 129 | push: true 130 | tags: | 131 | autometrics/am-proxy:v${{ needs.validate-version.outputs.version }} 132 | autometrics/am-proxy:latest 133 | cache-from: type=gha 134 | cache-to: type=gha,mode=max 135 | 136 | finalize-release: 137 | name: Upload artifacts, trigger homebrew workflow 138 | needs: [build-artifacts, validate-version] 139 | runs-on: ubuntu-latest 140 | permissions: 141 | contents: write 142 | steps: 143 | - name: Download am artifacts 144 | uses: actions/download-artifact@v3 145 | with: 146 | path: artifacts 147 | 148 | - name: Prepare files 149 | run: | 150 | mv artifacts/am-linux-aarch64/am am-linux-aarch64 151 | mv artifacts/am-linux-x86_64/am am-linux-x86_64 152 | mv artifacts/am-macos-aarch64/am am-macos-aarch64 153 | mv artifacts/am-macos-x86_64/am am-macos-x86_64 154 | 155 | - name: Calculate sha256sum 156 | run: | 157 | sha256sum am-linux-aarch64 >> am-linux-aarch64.sha256 158 | sha256sum am-linux-x86_64 >> am-linux-x86_64.sha256 159 | sha256sum am-macos-aarch64 >> am-macos-aarch64.sha256 160 | sha256sum am-macos-x86_64 >> am-macos-x86_64.sha256 161 | echo "### Checksums" 162 | echo "\`$(cat am-linux-aarch64.sha256)\`" >> $GITHUB_STEP_SUMMARY 163 | echo "\`$(cat am-linux-x86_64.sha256)\`" >> $GITHUB_STEP_SUMMARY 164 | echo "\`$(cat am-macos-aarch64.sha256)\`" >> $GITHUB_STEP_SUMMARY 165 | echo "\`$(cat 
am-macos-x86_64.sha256)\`" >> $GITHUB_STEP_SUMMARY 166 | 167 | - name: Upload checksums 168 | uses: actions/upload-artifact@v3 169 | with: 170 | name: checksums 171 | path: "*.sha256" 172 | 173 | - name: Attach artifacts to release 174 | uses: softprops/action-gh-release@v1 175 | if: ${{ github.event_name == 'release' }} 176 | with: 177 | files: | 178 | am-linux-aarch64 179 | am-linux-aarch64.sha256 180 | am-linux-x86_64 181 | am-linux-x86_64.sha256 182 | am-macos-aarch64 183 | am-macos-aarch64.sha256 184 | am-macos-x86_64 185 | am-macos-x86_64.sha256 186 | 187 | - name: Trigger homebrew workflow 188 | if: ${{ github.event_name == 'release' }} 189 | env: 190 | AM_VERSION: ${{ needs.validate-version.outputs.version }} 191 | GH_TOKEN: ${{ secrets.PRIVATE_GITHUB_TOKEN }} 192 | run: | 193 | gh workflow run update_formula.yml \ 194 | -R autometrics-dev/homebrew-tap \ 195 | -f AM_VERSION=$AM_VERSION \ 196 | -f SHA256_AARCH64_APPLE_DARWIN=$(cat am-macos-aarch64.sha256 | awk '{print $1}') \ 197 | -f SHA256_AARCH64_LINUX_GNU=$(cat am-linux-aarch64.sha256 | awk '{print $1}') \ 198 | -f SHA256_X86_64_APPLE_DARWIN=$(cat am-macos-x86_64.sha256 | awk '{print $1}') \ 199 | -f SHA256_X86_64_LINUX_GNU=$(cat am-linux-x86_64.sha256 | awk '{print $1}') 200 | 201 | - name: Trigger the CLI reference docs update 202 | env: 203 | GH_TOKEN: ${{ secrets.PRIVATE_GITHUB_TOKEN }} 204 | run: | 205 | gh workflow run update-cli-reference.yml \ 206 | -R autometrics-dev/docs 207 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # This workflow will be triggered by a GitHub pull-request and pushes to main. 2 | # It will verify that the code compiles, that the tests pass and that 3 | # clippy does not complain (too much) and finally that the code is formatted 4 | # according to cargo fmt. 
If it was a push to main, then it will also trigger a 5 | # build, and a deploy to dev. 6 | --- 7 | name: Test 8 | 9 | on: 10 | pull_request: 11 | branches: ["*"] 12 | push: 13 | branches: ["main"] 14 | 15 | jobs: 16 | test: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v3 20 | with: 21 | submodules: recursive 22 | 23 | - name: Install Rust 24 | uses: actions-rust-lang/setup-rust-toolchain@v1 25 | with: 26 | toolchain: stable 27 | components: clippy, rustfmt 28 | cache: true 29 | 30 | - name: Rustfmt Check 31 | uses: actions-rust-lang/rustfmt@v1 32 | 33 | - name: Cargo clippy 34 | run: cargo clippy --all-features --all 35 | 36 | - name: Run tests 37 | run: cargo test --all 38 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | **/target 3 | 4 | # This directory is created by the prometheus process 5 | # Could be removed once we persist the data somewhere else 6 | data 7 | 8 | .vscode 9 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "files/autometrics-shared"] 2 | path = files/autometrics-shared 3 | url = https://github.com/autometrics-dev/autometrics-shared.git 4 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
7 | 8 | ## [Unreleased] 9 | 10 | - Update octocrab dependency to get rid of a bunch of duplicate crates (#157) 11 | 12 | ## [0.6.0] 13 | 14 | - Proxy static assets for explorer instead of loading them directly from explorer.autometrics.dev. This url is also configurable so it is easier to use explorer hosted on a different URL (#142) 15 | - Add new endpoint `/api/functions` listing all autometrics'd functions in the current 16 | working directory 17 | - `am list all` now detects Python projects that use either `setuptools`, a 18 | `pyproject.toml`-compatible solution, or a `requirements.txt` file (#143) 19 | - `am list` now properly ignores the `target/` folder of Rust projects (#143) 20 | - `am list` now tries to track Rust module name better when called inside a 21 | cargo workspace (#143) 22 | - If a proxied request fails, am will now log more information and using 23 | different log levels, depending on the status code (#146): 24 | - 4xx: `debug` (still requires `--verbose` flag) 25 | - 5xx and connection issues: `warn` (will log by default) 26 | - Other status codes: `trace` 27 | - `am list` now properly detects methods in Go (#148) 28 | - Update Rust dependencies (#150) 29 | - Update default versions of Prometheus and Pushgateway (#150) 30 | - Add ability to scrape the metrics of `am`s own web server with `am start --scrape-self` (#153) 31 | - `am list` now properly detects functions instrumented in Typescript using the `Autometrics` decorator (#152) 32 | - `am instrument` is a new subcommand that can automatically add annotations to instrument a project (#152) 33 | + it works in Go, Python, Typescript, and Rust projects 34 | - The URL overview of `am start` and `am proxy` is now prettier (#154) 35 | - Updated dependencies (#155) 36 | 37 | ## [0.5.0] 38 | 39 | - The Prometheus write endpoint is now enabled by default (#136) 40 | - Include `am_list` crate inside the workspace, and add the function listing feature (#131) 41 | 42 | ## [0.4.1] 43 | 44 | - Change 
Docker Hub organization to `autometrics` (#133) 45 | - Add documentation about running `am` in a container (#135) 46 | - Publish a container optimized for `am proxy` (#135) 47 | 48 | ## [0.4.0] 49 | 50 | - Dump logs if prometheus or pushgateway return a non 0 exit code (#122) 51 | - Change the default explorer endpoint when using `am explorer` (#120) 52 | - Update all dependencies (#124) 53 | - Fix multiarch docker image for arm64 users (#125) 54 | - Update markdown reference generator command to disable TOC (#127) 55 | - Add `am proxy` command (#128) 56 | - Update more dependencies (#130) 57 | 58 | ## [0.3.0] 59 | 60 | - SHA-256 checksums are now provided for all artifact downloads (#101) 61 | - Added self updater (#102) 62 | - Use `clap-markdown` fork that enables formatting by display name (#103) 63 | - Correct `web.external-url` will now be passed to Prometheus and Pushgateway 64 | if a custom one is specified with `--listen-address` (#112) 65 | - The generated Prometheus config now gets stored in a unique, temporary location (#113) 66 | - Added new subcommand `init` to create a config file interactively (#117) 67 | - `am` is now available as a multi-arch Docker container on [Docker Hub](https://hub.docker.com/repository/docker/fiberplane/am/general) (#118) 68 | 69 | ## [0.2.1] 70 | 71 | - Do not crash if no `--config-file` is specified and no `am.toml` is found (#106) 72 | 73 | ## [0.2.0] 74 | 75 | - Make logging less verbose, and introduce a `--verbose` flag to enable verbose logging (#62) 76 | - Use host and port for job name in Prometheus target list (#66) 77 | - Prometheus/Pushgateway data directory no longer defaults to current working directory (#76) 78 | - `--ephemeral` can now be specified to automatically delete data created by 79 | Prometheus/Pushgateway after the process exits (#76) 80 | - Added new subcommand `discord` which links to the discord server (#80) 81 | - The `/metrics` endpoint now transparently redirects to `/pushgateway/metrics` if 82 | 
Pushgateway is enabled (#81) 83 | - Allow using a config file (am.toml) to set some defaults such as endpoints or 84 | if pushgateway is enabled (#67) 85 | - `honor_labels` will now be set to `true` for the Pushgateway endpoint 86 | in the generated Prometheus config, if it is enabled (#69) 87 | - Redirect `/graph` to `/explorer/graph.html` which will load a different JS 88 | script from explorer (#84) 89 | - Shorthand notion for endpoints defined within the config file (`am.toml`) is now 90 | allowed (#85) 91 | - Allow user to specify the Prometheus scrape interval (#87) 92 | - Added new subcommand `explore` which opens up explorer in the browser (#89) 93 | - The Autometrics SLO rules will now be automatically loaded into Prometheus if 94 | `--no-rules` is not specified (#94) 95 | 96 | ## [0.1.0] 97 | 98 | - Initial release 99 | - Instead of only copying the prometheus binary, simply extract everything (#17) 100 | - Add more flexible endpoints parser (#21) 101 | - Refactor downloading and verifying Prometheus archive (#32) 102 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "am", 4 | "am_list", 5 | "autometrics-am" 6 | ] 7 | default-members = ["am"] 8 | resolver = "2" 9 | 10 | [workspace.package] 11 | version = "0.6.0" 12 | edition = "2021" 13 | authors = ["Fiberplane "] 14 | documentation = "https://docs.rs/autometrics-am" 15 | repository = "https://github.com/autometrics-dev/am" 16 | homepage = "https://autometrics.dev" 17 | license = "MIT OR Apache-2.0" 18 | 19 | [workspace.dependencies] 20 | anyhow = "1.0.71" 21 | humantime = "2.1.0" 22 | serde = { version = "1.0.163", features = ["derive"] } 23 | url = { version = "2.3.1", features = ["serde"] } 24 | -------------------------------------------------------------------------------- /Dockerfile.proxy: 
-------------------------------------------------------------------------------- 1 | # Create a release image based on Debian with support for multiple architectures 2 | # Use the context that contains the am binaries in the `$OS/$ARCH/am` structure. 3 | # NOTE: Windows is currently not supported 4 | 5 | # This image comes with some differences in that it will run the proxy command 6 | # by default and it will also ensure that the proxy is listening on all 7 | # addresses. 8 | 9 | FROM debian:bookworm-slim 10 | 11 | # These variables _should_ be set by docker buildx 12 | ARG TARGETARCH 13 | ARG TARGETOS 14 | 15 | RUN apt-get update \ 16 | && apt-get install -y --force-yes --no-install-recommends ca-certificates \ 17 | && apt-get clean \ 18 | && apt-get autoremove \ 19 | && rm -rf /var/lib/apt/lists/* 20 | 21 | COPY ${TARGETOS}/${TARGETARCH}/am /app/am 22 | 23 | ENV LISTEN_ADDRESS="0.0.0.0:6789" 24 | 25 | EXPOSE 6789 26 | WORKDIR "/app/" 27 | ENTRYPOINT ["/app/am", "proxy"] 28 | -------------------------------------------------------------------------------- /Dockerfile.release: -------------------------------------------------------------------------------- 1 | # Create a release image based on Debian with support for multiple architectures 2 | # Use the context that contains the am binaries in the `$OS/$ARCH/am` structure. 
3 | # NOTE: Windows is currently not supported 4 | 5 | FROM debian:bookworm-slim 6 | 7 | # These variables _should_ be set by docker buildx 8 | ARG TARGETARCH 9 | ARG TARGETOS 10 | 11 | RUN apt-get update \ 12 | && apt-get install -y --force-yes --no-install-recommends ca-certificates \ 13 | && apt-get clean \ 14 | && apt-get autoremove \ 15 | && rm -rf /var/lib/apt/lists/* 16 | 17 | COPY ${TARGETOS}/${TARGETARCH}/am /app/am 18 | 19 | EXPOSE 6789 20 | WORKDIR "/app/" 21 | ENTRYPOINT ["/app/am"] 22 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 
175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # am 2 | 3 | `am` is the autometrics companion command line interface (CLI). It makes it easier to create a 4 | local Prometheus environment and inspect the metrics using the explorer. 
5 | 6 | [![Discord Shield](https://discordapp.com/api/guilds/950489382626951178/widget.png?style=shield)](https://discord.gg/kHtwcH8As9) 7 | 8 | 9 | ## Features 10 | 11 | - Download, configure and start various Prometheus components such as, 12 | - Prometheus - this will scrape, store and expose the metrics data 13 | - Pushgateway - allow for pushing metrics from batch jobs or short-lived 14 | processes 15 | - OTEL collector (coming soon!) 16 | - Visualize your metrics using the explorer 17 | - Inspect your Service Level Objectives 18 | 19 | ![The Autometrics Explorer](./assets/am-explorer.png) 20 | 21 | ## Getting started 22 | 23 | ### Installation 24 | 25 | The recommended installation for macOS is via [Homebrew](https://brew.sh/): 26 | 27 | ``` 28 | brew install autometrics-dev/tap/am 29 | ``` 30 | 31 | Alternatively, you can download the latest version from the [releases page](https://github.com/autometrics-dev/am/releases) 32 | 33 | ### Quickstart 34 | 35 | 36 | The following will download, configure and start Prometheus. Assuming you've created an application that is running locally on port `3000` it will start scraping the metrics for that service on that port: 37 | 38 | ``` 39 | am start :3000 40 | ``` 41 | 42 | You can also specify a host, scheme or a path: 43 | 44 | ``` 45 | am start https://example.com:3000/api/metrics 46 | ``` 47 | 48 | It is also possible to specify multiple endpoints: 49 | 50 | ``` 51 | am start :3000 :3030 52 | ``` 53 | 54 | Now you can visualize and inspect your metrics using the explorer by visiting `http://localhost:6789/`. 55 | 56 | ![The Autometrics Explorer](./assets/explorer.png) 57 | 58 | ### Container 59 | 60 | We provide a container that packages `am`. For more details see [docs/container.md](docs/container.md). 
61 | 62 | ## Documentation 63 | 64 | Visit the autometrics docs site for more details on how to use `am` and more 65 | details about autometrics: https://docs.autometrics.dev/ 66 | 67 | ## Configuration 68 | 69 | `am` supports defining a few configuration details in an `am.toml` file. These 70 | will be used by `am start`. Committing the `am.toml` file into git allows your 71 | entire team to have the settings as they are for your application. Example: 72 | 73 | ```toml 74 | pushgateway-enabled = true 75 | 76 | [[endpoint]] 77 | job-name = "main_app" 78 | url = "http://localhost:3030" 79 | ``` 80 | 81 | See [https://docs.autometrics.dev/local-development#configuration](https://docs.autometrics.dev/local-development#configuration) for more details. 82 | 83 | ## Contributing 84 | 85 | Issues, feature suggestions, and pull requests are very welcome! 86 | 87 | If you are interested in getting involved: 88 | - Join the conversation on [Discord](https://discord.gg/9eqGEs56UB) 89 | - Ask questions and share ideas in the [Github Discussions](https://github.com/orgs/autometrics-dev/discussions) 90 | - Take a look at the overall [Autometrics Project Roadmap](https://github.com/orgs/autometrics-dev/projects/1) 91 | 92 | If you are cloning this repository to make changes, be sure to clone it **with submodules**: 93 | 94 | ```shell 95 | git clone --recurse-submodules https://github.com/autometrics-dev/am.git 96 | ``` 97 | 98 | Planning to release a new version of `am`? Take a look at our [releasing](RELEASING.md) guide. 99 | 100 | ## License 101 | 102 | `am` is distributed under the terms of both the MIT license and the Apache license. See 103 | [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) for details.
104 | -------------------------------------------------------------------------------- /RELEASING.md: -------------------------------------------------------------------------------- 1 | # Release process 2 | 3 | In order to release a new version of `am`, first **update the `version` within `Cargo.toml`** to the next desired version, 4 | taking into account any [Semver](https://semver.org/) version bumps. 5 | 6 | Once your PR is approved and merged, create a **GitHub release** with the same `tag` as you set `version` to in `Cargo.toml`. 7 | Our release GitHub actions workflow will automatically build the binaries and attach them to the release. 8 | -------------------------------------------------------------------------------- /am.toml: -------------------------------------------------------------------------------- 1 | pushgateway-enabled = true 2 | # prometheus-scrape-interval = "5m" 3 | 4 | [[endpoint]] 5 | job-name = "main_app" 6 | url = "http://localhost:3030" 7 | # scrape-interval = "5s" 8 | 9 | [[endpoint]] 10 | job-name = "secondary_app" 11 | url = "http://localhost:3030" 12 | -------------------------------------------------------------------------------- /am/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "am" 3 | description = "Autometrics Companion CLI app" 4 | keywords = ["metrics", "prometheus", "opentelemetry"] 5 | categories = ["development-tools::profiling"] 6 | version.workspace = true 7 | edition.workspace = true 8 | authors.workspace = true 9 | documentation.workspace = true 10 | repository.workspace = true 11 | homepage.workspace = true 12 | license.workspace = true 13 | build = "build.rs" 14 | 15 | [dependencies] 16 | am_list = { path = "../am_list" } 17 | anyhow = { workspace = true } 18 | autometrics = { version = "0.6.0", features = ["prometheus-exporter"] } 19 | autometrics-am = { path = "../autometrics-am" } 20 | axum = "0.6.18" 21 | clap = { version = "4.2.7", 
features = ["derive", "env"] } 22 | clap-markdown = { git = "https://github.com/keturiosakys/clap-markdown.git" } 23 | dialoguer = "0.11.0" 24 | directories = "5.0.1" 25 | flate2 = "1.0.26" 26 | futures-util = { version = "0.3.28", features = ["io"] } 27 | hex = "0.4.3" 28 | http = "0.2.9" 29 | humantime = { workspace = true } 30 | ignore = "0.4.20" 31 | include_dir = "0.7.3" 32 | indicatif = "0.17.5" 33 | itertools = "0.11.0" 34 | octocrab = "0.32.0" 35 | once_cell = "1.17.1" 36 | open = "5.0.0" 37 | rand = "0.8.5" 38 | remove_dir_all = "0.8.2" 39 | reqwest = { version = "0.11.18", default-features = false, features = [ 40 | "json", 41 | "rustls-tls", 42 | "stream", 43 | ] } 44 | self-replace = "1.3.5" 45 | semver_rs = "0.2.0" 46 | serde = { workspace = true } 47 | serde_json = "1.0.96" 48 | serde_yaml = "0.9.21" 49 | sha2 = "0.10.6" 50 | tar = "0.4.38" 51 | tempfile = "3.5.0" 52 | termcolor = "1.3.0" 53 | thiserror = "1.0.48" 54 | tokio = { version = "1.28.1", features = ["full"] } 55 | toml = "0.8.6" 56 | tracing = "0.1.37" 57 | tracing-subscriber = { version = "0.3.17", features = ["env-filter", "json"] } 58 | url = { workspace = true } 59 | 60 | [dev-dependencies] 61 | rstest = "0.18.2" 62 | -------------------------------------------------------------------------------- /am/build.rs: -------------------------------------------------------------------------------- 1 | use std::env::var; 2 | 3 | fn main() { 4 | // https://stackoverflow.com/a/51311222/11494565 5 | println!("cargo:rustc-env=TARGET={}", var("TARGET").unwrap()); 6 | } 7 | -------------------------------------------------------------------------------- /am/src/commands.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use autometrics_am::config::AmConfig; 3 | use clap::{Parser, Subcommand}; 4 | use indicatif::MultiProgress; 5 | use std::path::PathBuf; 6 | use tracing::info; 7 | 8 | mod explore; 9 | mod init; 10 | mod instrument; 11 | mod 
list; 12 | mod proxy; 13 | pub mod start; 14 | pub mod system; 15 | pub mod update; 16 | 17 | #[derive(Parser)] 18 | #[command(author, version, about, long_about = None, bin_name = "am")] 19 | pub struct Application { 20 | #[command(subcommand)] 21 | pub command: SubCommands, 22 | 23 | /// Enable verbose logging. By enabling this you are also able to use 24 | /// RUST_LOG environment variable to change the log levels of other 25 | /// modules. 26 | /// 27 | /// By default, we will only log INFO level messages of all modules. If this 28 | /// flag is enabled, then we will log the message from `am` with DEBUG 29 | /// level, other modules still use the INFO level. 30 | #[clap(long, short)] 31 | pub verbose: bool, 32 | 33 | /// Use the following file to define defaults for am. 34 | #[clap(long, env)] 35 | pub config_file: Option, 36 | } 37 | 38 | #[derive(Subcommand)] 39 | pub enum SubCommands { 40 | /// Start scraping the specified endpoint(s), while also providing a web 41 | /// interface to inspect the autometrics data. 42 | Start(start::CliArguments), 43 | 44 | /// Manage am related system settings. Such as cleaning up downloaded 45 | /// Prometheus, Pushgateway installs. 46 | System(system::Arguments), 47 | 48 | /// Open up the existing Explorer 49 | #[clap(alias = "explorer")] 50 | Explore(explore::Arguments), 51 | 52 | /// Use am as a proxy to another prometheus instance 53 | Proxy(proxy::CliArguments), 54 | 55 | /// Create a new `am.toml` file interactively with sensible defaults 56 | Init(init::Arguments), 57 | 58 | /// Open the Fiberplane discord to receive help, send suggestions or 59 | /// discuss various things related to Autometrics and the `am` CLI 60 | Discord, 61 | 62 | /// Run the updater 63 | Update(update::Arguments), 64 | 65 | /// List the functions in a project 66 | List(list::Arguments), 67 | 68 | /// Instrument a project entirely. 69 | /// 70 | /// IMPORTANT: This will add code in your files! 
If you want to easily 71 | /// undo the effects of this command, stage your work in progress (using `git add` or similar) 72 | /// So that a command like `git restore .` can undo all unstaged changes, leaving your work 73 | /// in progress alone. 74 | Instrument(instrument::Arguments), 75 | 76 | #[clap(hide = true)] 77 | MarkdownHelp, 78 | } 79 | 80 | pub async fn handle_command(app: Application, config: AmConfig, mp: MultiProgress) -> Result<()> { 81 | match app.command { 82 | SubCommands::Start(args) => start::handle_command(args, config, mp).await, 83 | SubCommands::System(args) => system::handle_command(args, mp).await, 84 | SubCommands::Explore(args) => explore::handle_command(args).await, 85 | SubCommands::Proxy(args) => proxy::handle_command(args).await, 86 | SubCommands::Init(args) => init::handle_command(args).await, 87 | SubCommands::Discord => { 88 | const URL: &str = "https://discord.gg/kHtwcH8As9"; 89 | 90 | if open::that(URL).is_err() { 91 | info!("Unable to open browser, open the following URL in your browser: {URL}"); 92 | } 93 | 94 | Ok(()) 95 | } 96 | SubCommands::Update(args) => update::handle_command(args, mp).await, 97 | SubCommands::List(args) => list::handle_command(args), 98 | SubCommands::Instrument(args) => instrument::handle_command(args), 99 | SubCommands::MarkdownHelp => { 100 | let disable_toc = true; 101 | clap_markdown::print_help_markdown::(Some(disable_toc)); 102 | Ok(()) 103 | } 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /am/src/commands/explore.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use clap::Parser; 3 | use tracing::info; 4 | use url::Url; 5 | 6 | #[derive(Parser, Clone)] 7 | pub struct Arguments { 8 | /// The Prometheus endpoint that will be passed to Explorer 9 | #[clap(long, env)] 10 | prometheus_endpoint: Option, 11 | 12 | /// Which endpoint to open in the browser 13 | #[clap(long, env, 
default_value = "https://explorer.autometrics.dev/")] 14 | explorer_endpoint: Url, 15 | } 16 | 17 | pub async fn handle_command(mut args: Arguments) -> Result<()> { 18 | let url = &mut args.explorer_endpoint; 19 | 20 | if let Some(prom_url) = args.prometheus_endpoint { 21 | let query = format!("prometheusUrl={}", prom_url.as_str()); 22 | url.set_query(Some(&query)); 23 | } 24 | 25 | if open::that(url.as_str()).is_err() { 26 | info!( 27 | "Unable to open browser, open the following URL in your browser: {}", 28 | url.as_str() 29 | ); 30 | } 31 | 32 | Ok(()) 33 | } 34 | -------------------------------------------------------------------------------- /am/src/commands/init.rs: -------------------------------------------------------------------------------- 1 | use crate::interactive::{confirm, confirm_optional, user_input, user_input_optional}; 2 | use anyhow::{bail, Context, Result}; 3 | use autometrics_am::config::{AmConfig, Endpoint}; 4 | use clap::Parser; 5 | use std::fs; 6 | use std::path::PathBuf; 7 | use std::time::Duration; 8 | use tracing::info; 9 | use url::Url; 10 | 11 | #[derive(Parser, Clone)] 12 | pub struct Arguments { 13 | /// Where the file should be outputted to. Defaults to current directory 14 | #[clap(long, env, default_value = "./am.toml")] 15 | output: PathBuf, 16 | 17 | /// Whenever to forcefully override an existing `am.toml` file, if it already exists 18 | #[clap(long, env)] 19 | force: bool, 20 | } 21 | 22 | pub async fn handle_command(args: Arguments) -> Result<()> { 23 | if args.output.exists() && !args.force { 24 | bail!("Output file already exists. Supply --force to override"); 25 | } 26 | 27 | let mut endpoints = vec![]; 28 | 29 | while confirm("Do you want to add (more) endpoints?")? 
{ 30 | endpoints.push(prompt_endpoint()?); 31 | } 32 | 33 | let pushgateway_enabled = 34 | confirm_optional("Do you want to enable the Pushgateway (optional)?")?; 35 | let scrape_interval = prompt_scrape_interval()?; 36 | 37 | let cfg = AmConfig { 38 | endpoints: if endpoints.is_empty() { 39 | None 40 | } else { 41 | Some(endpoints) 42 | }, 43 | pushgateway_enabled, 44 | prometheus_scrape_interval: scrape_interval, 45 | }; 46 | 47 | let config = toml::to_string(&cfg)?; 48 | fs::write(&args.output, config).context("failed to write file to disk")?; 49 | 50 | info!("Successfully written config to {}", args.output.display()); 51 | Ok(()) 52 | } 53 | 54 | fn prompt_endpoint() -> Result { 55 | let endpoint = user_input("Enter a metrics endpoint URL")?; 56 | let job_name = user_input_optional("Enter job name (optional)")?; 57 | let honor_labels = confirm_optional("honor_labels (optional)")?; 58 | let scrape_interval = prompt_scrape_interval()?; 59 | 60 | Ok(Endpoint { 61 | url: Url::parse(&endpoint)?, 62 | job_name, 63 | honor_labels, 64 | prometheus_scrape_interval: scrape_interval, 65 | }) 66 | } 67 | 68 | fn prompt_scrape_interval() -> Result> { 69 | Ok( 70 | user_input_optional("Scrape Interval in seconds (leave empty for default)")? 
71 | .and_then(|i| i.parse().ok()) 72 | .map(Duration::from_secs), 73 | ) 74 | } 75 | -------------------------------------------------------------------------------- /am/src/commands/instrument.rs: -------------------------------------------------------------------------------- 1 | use crate::interactive; 2 | use am_list::Language; 3 | use anyhow::Context; 4 | use clap::{Args, Subcommand}; 5 | use std::{ 6 | path::{Path, PathBuf}, 7 | process, 8 | }; 9 | use tracing::info; 10 | 11 | #[derive(Args)] 12 | pub struct Arguments { 13 | #[command(subcommand)] 14 | command: Command, 15 | } 16 | 17 | #[derive(Subcommand)] 18 | enum Command { 19 | /// Instrument functions in a single project, giving the language implementation 20 | /// 21 | /// IMPORTANT: This will add code in your files! If you want to easily 22 | /// undo the effects of this command, stage your work in progress (using `git add` or similar) 23 | /// So that a command like `git restore .` can undo all unstaged changes, leaving your work 24 | /// in progress alone. 25 | Single(SingleProject), 26 | /// Instrument functions in all projects under the given directory, detecting languages on a best-effort basis. 27 | /// 28 | /// IMPORTANT: This will add code in your files! If you want to easily 29 | /// undo the effects of this command, stage your work in progress (using `git add` or similar) 30 | /// So that a command like `git restore .` can undo all unstaged changes, leaving your work 31 | /// in progress alone. 32 | All(AllProjects), 33 | } 34 | 35 | #[derive(Args)] 36 | struct SingleProject { 37 | /// Language to detect autometrics functions for. Valid values are: 38 | /// - 'rust' or 'rs' for Rust, 39 | /// - 'go' for Golang, 40 | /// - 'typescript', 'ts', 'javascript', or 'js' for Typescript/Javascript, 41 | /// - 'python' or 'py' for Python. 
42 | #[arg(short, long, value_name = "LANGUAGE", verbatim_doc_comment)] 43 | language: Language, 44 | /// Root of the project to start the search on: 45 | /// - For Rust projects it must be where the Cargo.toml lie, 46 | /// - For Go projects it must be the root of the repository, 47 | /// - For Python projects it must be the root of the library, 48 | /// - For Typescript projects it must be where the package.json lie. 49 | #[arg(value_name = "ROOT", verbatim_doc_comment)] 50 | root: PathBuf, 51 | /// A list of patterns to exclude from instrumentation. The patterns follow .gitignore rules, so 52 | /// `--exclude "/vendor/"` will exclude all the vendor subdirectory only at the root, and adding 53 | /// a pattern that starts with `!` will unignore a file or directory 54 | #[arg(short, long, value_name = "PATTERNS")] 55 | exclude: Vec, 56 | } 57 | 58 | #[derive(Args)] 59 | struct AllProjects { 60 | /// Main directory to start the subprojects search on. am currently detects 61 | /// Rust (Cargo.toml), Typescript (package.json), and Golang (go.mod) 62 | /// projects. 63 | #[arg(value_name = "ROOT")] 64 | root: PathBuf, 65 | /// A list of patterns to exclude from instrumentation. 
The patterns follow .gitignore rules, so 66 | /// `--exclude "/vendor/"` will exclude all the vendor subdirectory only at the root, and adding 67 | /// a pattern that starts with `!` will unignore a file or directory 68 | #[arg(short, long, value_name = "PATTERNS")] 69 | exclude: Vec, 70 | } 71 | 72 | pub fn handle_command(args: Arguments) -> anyhow::Result<()> { 73 | match args.command { 74 | Command::Single(args) => handle_single_project(args), 75 | Command::All(args) => handle_all_projects(args), 76 | } 77 | } 78 | 79 | fn folder_has_unstaged_changes(root: &Path) -> Option { 80 | if cfg!(windows) { 81 | // TODO: figure out the Windows story 82 | return None; 83 | } 84 | 85 | if cfg!(unix) { 86 | // TODO: Figure out the non git story 87 | let git_diff = process::Command::new("git") 88 | .arg("-C") 89 | .arg(root.as_os_str()) 90 | .arg("diff") 91 | .output(); 92 | return match git_diff { 93 | Ok(output) => Some(!output.stdout.is_empty()), 94 | Err(_) => { 95 | // We either don't have git, or root is not within a repository 96 | None 97 | } 98 | }; 99 | } 100 | 101 | None 102 | } 103 | 104 | fn handle_all_projects(args: AllProjects) -> Result<(), anyhow::Error> { 105 | let root = args 106 | .root 107 | .canonicalize() 108 | .context("The path must be resolvable to an absolute path")?; 109 | 110 | if let Some(true) = folder_has_unstaged_changes(&root) { 111 | let cont = interactive::confirm("The targeted root folder seems to have unstaged changes. 
`am` will also change files in this folder.\nDo you wish to continue?")?; 112 | if !cont { 113 | return Ok(()); 114 | } 115 | } 116 | 117 | info!("Instrumenting functions in {}:", root.display()); 118 | 119 | let mut exclude_patterns_builder = ignore::gitignore::GitignoreBuilder::new(&root); 120 | for pattern in args.exclude { 121 | exclude_patterns_builder.add_line(None, &pattern)?; 122 | } 123 | let exclude_patterns = exclude_patterns_builder.build()?; 124 | 125 | am_list::instrument_all_project_files(&root, &exclude_patterns)?; 126 | 127 | println!("If your project has Golang files, you need to run `go generate` now."); 128 | 129 | Ok(()) 130 | } 131 | 132 | fn handle_single_project(args: SingleProject) -> Result<(), anyhow::Error> { 133 | let root = args 134 | .root 135 | .canonicalize() 136 | .context("The path must be resolvable to an absolute path")?; 137 | 138 | if let Some(true) = folder_has_unstaged_changes(&root) { 139 | let cont = interactive::confirm("The targeted root folder seems to have unstaged changes. 
`am` will also change files in this folder.\nDo you wish to continue?")?; 140 | if !cont { 141 | return Ok(()); 142 | } 143 | } 144 | info!("Instrumenting functions in {}:", root.display()); 145 | 146 | let mut exclude_patterns_builder = ignore::gitignore::GitignoreBuilder::new(&root); 147 | for pattern in args.exclude { 148 | exclude_patterns_builder.add_line(None, &pattern)?; 149 | } 150 | let exclude_patterns = exclude_patterns_builder.build()?; 151 | 152 | am_list::instrument_single_project_files(&root, args.language, &exclude_patterns)?; 153 | 154 | if args.language == Language::Go { 155 | println!("You need to run `go generate` now."); 156 | } 157 | 158 | Ok(()) 159 | } 160 | -------------------------------------------------------------------------------- /am/src/commands/list.rs: -------------------------------------------------------------------------------- 1 | use am_list::Language; 2 | use clap::{Args, Subcommand}; 3 | use std::path::PathBuf; 4 | use tracing::info; 5 | 6 | #[derive(Args)] 7 | pub struct Arguments { 8 | #[command(subcommand)] 9 | command: Command, 10 | } 11 | 12 | #[derive(Subcommand)] 13 | enum Command { 14 | /// List functions in a single project, giving the language implementation 15 | Single(SingleProject), 16 | /// List functions in all projects under the given directory, detecting languages on a best-effort basis. 17 | All(AllProjects), 18 | } 19 | 20 | #[derive(Args)] 21 | struct SingleProject { 22 | /// Language to detect autometrics functions for. Valid values are: 23 | /// - 'rust' or 'rs' for Rust, 24 | /// - 'go' for Golang, 25 | /// - 'typescript', 'ts', 'javascript', or 'js' for Typescript/Javascript, 26 | /// - 'python' or 'py' for Python. 
27 | #[arg(short, long, value_name = "LANGUAGE", verbatim_doc_comment)] 28 | language: Language, 29 | /// Root of the project to start the search on: 30 | /// - For Rust projects it must be where the Cargo.toml lie, 31 | /// - For Go projects it must be the root of the repository, 32 | /// - For Python projects it must be the root of the library, 33 | /// - For Typescript projects it must be where the package.json lie. 34 | #[arg(value_name = "ROOT", verbatim_doc_comment)] 35 | root: PathBuf, 36 | /// List all functions instead of only the autometricized ones (defaults to false) 37 | #[arg(short, long, default_value = "false")] 38 | all_functions: bool, 39 | /// Pretty print the resulting JSON (defaults to false) 40 | #[arg(short, long, default_value = "false")] 41 | pretty: bool, 42 | } 43 | 44 | #[derive(Args)] 45 | struct AllProjects { 46 | /// Main directory to start the subprojects search on. am currently detects 47 | /// Rust (Cargo.toml), Typescript (package.json), and Golang (go.mod) 48 | /// projects. 
49 | #[arg(value_name = "ROOT")] 50 | root: PathBuf, 51 | /// Pretty print the resulting JSON (defaults to false) 52 | #[arg(short, long, default_value = "false")] 53 | pretty: bool, 54 | } 55 | 56 | pub fn handle_command(args: Arguments) -> anyhow::Result<()> { 57 | match args.command { 58 | Command::Single(args) => handle_single_project(args), 59 | Command::All(args) => handle_all_projects(args), 60 | } 61 | } 62 | 63 | fn handle_all_projects(args: AllProjects) -> Result<(), anyhow::Error> { 64 | let root = args.root; 65 | info!("Listing functions in {}:", root.display()); 66 | let res = am_list::list_all_project_functions(&root)?; 67 | 68 | if args.pretty { 69 | println!("{}", serde_json::to_string_pretty(&res)?); 70 | } else { 71 | println!("{}", serde_json::to_string(&res)?); 72 | } 73 | info!( 74 | "Total: {} functions", 75 | res.values().map(|list| list.1.len()).sum::() 76 | ); 77 | 78 | Ok(()) 79 | } 80 | 81 | fn handle_single_project(args: SingleProject) -> Result<(), anyhow::Error> { 82 | let root = args.root; 83 | info!("Autometrics functions in {}:", root.display()); 84 | 85 | let res = am_list::list_single_project_functions(&root, args.language, args.all_functions)?; 86 | 87 | if args.pretty { 88 | println!("{}", serde_json::to_string_pretty(&res)?); 89 | } else { 90 | println!("{}", serde_json::to_string(&res)?); 91 | } 92 | info!("Total: {} functions", res.len()); 93 | 94 | Ok(()) 95 | } 96 | -------------------------------------------------------------------------------- /am/src/commands/proxy.rs: -------------------------------------------------------------------------------- 1 | use crate::server::start_web_server; 2 | use crate::terminal; 3 | use anyhow::{bail, Context, Result}; 4 | use clap::Parser; 5 | use directories::ProjectDirs; 6 | use std::collections::HashMap; 7 | use std::net::SocketAddr; 8 | use tokio::select; 9 | use tokio::sync::watch; 10 | use tracing::info; 11 | use url::Url; 12 | 13 | #[derive(Parser, Clone)] 14 | pub struct 
CliArguments { 15 | /// The listen address for the web server of am. 16 | /// 17 | /// This includes am's HTTP API, the explorer and the proxy to the Prometheus, Gateway, etc. 18 | #[clap( 19 | short, 20 | long, 21 | env, 22 | default_value = "127.0.0.1:6789", 23 | alias = "explorer-address" 24 | )] 25 | listen_address: SocketAddr, 26 | 27 | /// The upstream Prometheus URL 28 | #[clap(long, env, alias = "prometheus-address")] 29 | prometheus_url: Option, 30 | 31 | #[clap( 32 | long, 33 | env, 34 | default_value = "https://explorer.autometrics.dev", 35 | help_heading = "Location for static assets used by the explorer" 36 | )] 37 | static_assets_url: Url, 38 | } 39 | 40 | #[derive(Debug, Clone)] 41 | struct Arguments { 42 | listen_address: SocketAddr, 43 | prometheus_url: Option, 44 | static_assets_url: Url, 45 | } 46 | 47 | impl Arguments { 48 | fn new(args: CliArguments) -> Self { 49 | Arguments { 50 | listen_address: args.listen_address, 51 | prometheus_url: args.prometheus_url, 52 | static_assets_url: args.static_assets_url, 53 | } 54 | } 55 | } 56 | 57 | pub async fn handle_command(args: CliArguments) -> Result<()> { 58 | let args = Arguments::new(args); 59 | 60 | // First let's retrieve the directory for our application to store data in. 61 | let project_dirs = 62 | ProjectDirs::from("", "autometrics", "am").context("Unable to determine home directory")?; 63 | let local_data = project_dirs.data_local_dir().to_owned(); 64 | 65 | // Make sure that the local data directory exists for our application. 66 | std::fs::create_dir_all(&local_data) 67 | .with_context(|| format!("Unable to create data directory: {:?}", local_data))?; 68 | 69 | let (tx, _) = watch::channel(None); 70 | let (urls_tx, urls_rx) = watch::channel(HashMap::new()); 71 | 72 | // Start web server for hosting the explorer, am api and proxies to the enabled services. 
73 | let web_server_task = async move { 74 | start_web_server( 75 | &args.listen_address, 76 | false, 77 | false, 78 | args.prometheus_url, 79 | args.static_assets_url, 80 | tx, 81 | urls_tx, 82 | ) 83 | .await 84 | }; 85 | 86 | terminal::wait_and_print_urls(urls_rx); 87 | 88 | select! { 89 | biased; 90 | 91 | _ = tokio::signal::ctrl_c() => { 92 | info!("SIGINT signal received, exiting..."); 93 | Ok(()) 94 | } 95 | 96 | Err(err) = web_server_task => { 97 | bail!("Web server exited with an error: {err:?}"); 98 | } 99 | 100 | else => { 101 | Ok(()) 102 | } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /am/src/commands/system.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use clap::{Parser, Subcommand}; 3 | use indicatif::MultiProgress; 4 | 5 | pub mod prune; 6 | 7 | #[derive(Parser)] 8 | #[command(author, version, about, long_about = None)] 9 | pub struct Arguments { 10 | #[command(subcommand)] 11 | pub command: SubCommands, 12 | } 13 | 14 | #[derive(Subcommand)] 15 | pub enum SubCommands { 16 | /// Delete all locally downloaded binaries. 17 | Prune(prune::Arguments), 18 | } 19 | 20 | pub async fn handle_command(args: Arguments, mp: MultiProgress) -> Result<()> { 21 | match args.command { 22 | SubCommands::Prune(args) => prune::handle_command(args, mp).await, 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /am/src/commands/system/prune.rs: -------------------------------------------------------------------------------- 1 | use crate::interactive; 2 | use anyhow::{bail, Context, Result}; 3 | use clap::Parser; 4 | use directories::ProjectDirs; 5 | use indicatif::MultiProgress; 6 | use std::io; 7 | use tracing::{debug, info}; 8 | 9 | #[derive(Parser)] 10 | #[command(author, version, about, long_about = None)] 11 | pub struct Arguments { 12 | /// Force the cleanup without asking for confirmation. 
13 | #[clap(short, long, default_value = "false")] 14 | force: bool, 15 | } 16 | 17 | pub async fn handle_command(args: Arguments, _: MultiProgress) -> Result<()> { 18 | // If the users hasn't specified the `force` argument, then ask the user if 19 | // they want to continue. 20 | if !args.force && !interactive::confirm("Prune all am program files?")? { 21 | bail!("Pruning cancelled"); 22 | } 23 | 24 | // Get local directory 25 | let project_dirs = 26 | ProjectDirs::from("", "autometrics", "am").context("Unable to determine home directory")?; 27 | let local_data = project_dirs.data_local_dir().to_owned(); 28 | 29 | debug!("Deleting all content from {:?}", local_data); 30 | 31 | // For now just greedily delete everything in the local data directory for am 32 | if let Err(err) = remove_dir_all::remove_dir_contents(&local_data) { 33 | // If the root directory does not exist, we can ignore the error (NOTE: 34 | // I don't know if it is possible to get this error in any other 35 | // situations) 36 | if err.kind() != io::ErrorKind::NotFound { 37 | return Err(err.into()); 38 | } 39 | } 40 | 41 | info!("Pruning complete"); 42 | Ok(()) 43 | } 44 | -------------------------------------------------------------------------------- /am/src/commands/update.rs: -------------------------------------------------------------------------------- 1 | use crate::commands::start::CLIENT; 2 | use crate::downloader::download_github_release; 3 | use anyhow::{anyhow, bail, Context, Result}; 4 | use clap::Parser; 5 | use directories::ProjectDirs; 6 | use indicatif::MultiProgress; 7 | use itertools::Itertools; 8 | use octocrab::models::repos::{Asset, Release}; 9 | use self_replace::self_replace; 10 | use semver_rs::Version; 11 | use std::fs::{File, OpenOptions}; 12 | use std::time::{Duration, SystemTime}; 13 | use std::{env, fs}; 14 | use tracing::{debug, error, info, trace, warn}; 15 | 16 | const AUTOMETRICS_GITHUB_ORG: &str = "autometrics-dev"; 17 | const AUTOMETRICS_AM_REPO: &str = "am"; 18 
| 19 | #[derive(Parser)] 20 | pub struct Arguments { 21 | /// Whenever to ignore Homebrew checks and forcefully update 22 | #[clap(long, short)] 23 | force: bool, 24 | } 25 | 26 | pub(crate) async fn handle_command(args: Arguments, mp: MultiProgress) -> Result<()> { 27 | let release = latest_release().await?; 28 | 29 | if !update_needed(&release)? { 30 | info!("Already on the latest version"); 31 | return Ok(()); 32 | } 33 | 34 | let new_tag = release.tag_name; 35 | 36 | if is_homebrew() && !args.force { 37 | info!("A new version of `am` is available: {new_tag}"); 38 | info!("You can update by running `brew upgrade am` (or use `am update --force`)"); 39 | return Ok(()); 40 | } 41 | 42 | info!("Updating to {new_tag}"); 43 | 44 | let asset_needed = asset_needed()?; 45 | 46 | let assets: Option<(&Asset, &Asset)> = release 47 | .assets 48 | .iter() 49 | .filter(|a| a.name.starts_with(asset_needed)) 50 | .sorted_by(|a, b| a.name.cmp(&b.name)) 51 | .collect_tuple(); 52 | 53 | if assets.is_none() { 54 | error!("Could not find release for your target platform."); 55 | return Ok(()); 56 | } 57 | 58 | // .unwrap is safe because we checked above if its none 59 | // because of .sorted_by above (which sorts by name), the .sha256 file will be the second one *guaranteed* 60 | let (binary_asset, sha256_asset) = assets.unwrap(); 61 | 62 | let executable = env::current_exe()?; 63 | let temp_exe = executable 64 | .parent() 65 | .ok_or_else(|| anyhow!("Parent directory not found"))? 66 | .join("am_update.part"); 67 | 68 | let file = File::create(&temp_exe)?; 69 | 70 | let calculated_checksum = download_github_release( 71 | &file, 72 | AUTOMETRICS_GITHUB_ORG, 73 | AUTOMETRICS_AM_REPO, 74 | new_tag.strip_prefix('v').unwrap_or(&new_tag), 75 | &binary_asset.name, 76 | &mp, 77 | ) 78 | .await?; 79 | 80 | let checksum_line = CLIENT 81 | .get(sha256_asset.browser_download_url.clone()) 82 | .send() 83 | .await? 
84 | .text() 85 | .await?; 86 | 87 | let remote_checksum = checksum_line 88 | .split_once(' ') 89 | .map(|(checksum, _)| checksum) 90 | .unwrap_or(&checksum_line); 91 | 92 | if calculated_checksum != remote_checksum { 93 | debug!( 94 | %remote_checksum, 95 | %calculated_checksum, "Calculated sha256 hash does not match the remote sha256 hash" 96 | ); 97 | 98 | fs::remove_file(&temp_exe).context("Failed to delete file that failed checksum match")?; 99 | drop(temp_exe); 100 | 101 | bail!("Calculated sha256 hash does not match the remote sha256 hash"); 102 | } 103 | 104 | self_replace(&temp_exe).context("failed to replace self")?; 105 | fs::remove_file(&temp_exe).context("failed to delete updater file")?; 106 | 107 | info!("Successfully updated to {new_tag}"); 108 | Ok(()) 109 | } 110 | 111 | pub(crate) async fn update_check() { 112 | let Some(project_dirs) = ProjectDirs::from("", "autometrics", "am") else { 113 | warn!("failed to run update checker: home directory does not exist"); 114 | return; 115 | }; 116 | 117 | let config_dir = project_dirs.config_dir(); 118 | 119 | if let Err(err) = fs::create_dir_all(config_dir) { 120 | error!(?err, "failed to create config directory"); 121 | return; 122 | } 123 | 124 | let check_file = config_dir.join("version_check"); 125 | 126 | let should_check = match fs::metadata(&check_file) { 127 | Ok(metadata) => { 128 | if let Ok(date) = metadata.modified() { 129 | date < (SystemTime::now() - Duration::from_secs(60 * 60 * 24)) 130 | } else { 131 | false 132 | } 133 | } 134 | Err(err) => { 135 | // This will most likely be caused by the file not existing, so we 136 | // will just trace it and go ahead with the version check. 137 | trace!(%err, "checking the update file check resulted in a error"); 138 | true 139 | } 140 | }; 141 | 142 | // We've checked the version recently, so just return early indicating that 143 | // no update should be done. 
144 | if !should_check { 145 | return; 146 | } 147 | 148 | let Ok(release) = latest_release().await else { 149 | return; 150 | }; 151 | let Ok(needs_update) = update_needed(&release) else { 152 | return; 153 | }; 154 | 155 | if let Err(err) = OpenOptions::new() 156 | .create(true) 157 | .write(true) 158 | .truncate(true) 159 | .open(&check_file) 160 | { 161 | trace!(?err, "failed to create `version_check` file"); 162 | } 163 | 164 | if !needs_update { 165 | return; 166 | } 167 | 168 | info!("New update is available: {}", release.tag_name); 169 | } 170 | 171 | fn update_needed(release: &Release) -> Result { 172 | let current_tag = Version::new(env!("CARGO_PKG_VERSION")).parse()?; 173 | let new_tag = Version::new( 174 | release 175 | .tag_name 176 | .strip_prefix('v') 177 | .unwrap_or(&release.tag_name), 178 | ) 179 | .parse()?; 180 | 181 | Ok(new_tag > current_tag) 182 | } 183 | 184 | async fn latest_release() -> Result { 185 | octocrab::instance() 186 | .repos(AUTOMETRICS_GITHUB_ORG, AUTOMETRICS_AM_REPO) 187 | .releases() 188 | .get_latest() 189 | .await 190 | .context("failed to check latest release from GitHub") 191 | } 192 | 193 | fn asset_needed() -> Result<&'static str> { 194 | Ok(match env!("TARGET") { 195 | "x86_64-unknown-linux-gnu" => "am-linux-x86_64", 196 | "aarch64-unknown-linux-gnu" => "am-linux-aarch64", 197 | "x86_64-apple-darwin" => "am-macos-aarch64", 198 | "aarch64-apple-darwin" => "am-macos-x86_64", 199 | target => bail!("unsupported target: {target}"), 200 | }) 201 | } 202 | 203 | #[inline] 204 | fn is_homebrew() -> bool { 205 | #[cfg(target_os = "linux")] 206 | return env::current_exe() 207 | .map(|path| path.starts_with("/home/linuxbrew/.linuxbrew")) 208 | .unwrap_or_default(); 209 | 210 | #[cfg(target_os = "macos")] 211 | return env::current_exe() 212 | .map(|path| path.starts_with("/usr/local") || path.starts_with("/opt/homebrew")) 213 | .unwrap_or_default(); 214 | 215 | #[cfg(all(not(target_os = "linux"), not(target_os = "macos")))] 216 | 
return false; 217 | } 218 | -------------------------------------------------------------------------------- /am/src/dir.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use std::ops::Deref; 3 | use std::path::{Path, PathBuf}; 4 | use std::{env, fs}; 5 | use tracing::warn; 6 | 7 | pub struct AutoCleanupDir { 8 | path: PathBuf, 9 | ephemeral: bool, 10 | } 11 | 12 | impl AutoCleanupDir { 13 | pub(crate) fn new(process: &str, ephemeral: bool) -> Result { 14 | let start_dir = if ephemeral { 15 | env::temp_dir() 16 | } else { 17 | env::current_dir()? 18 | }; 19 | 20 | let path = start_dir.join(".autometrics").join(process); 21 | fs::create_dir_all(&path)?; 22 | 23 | Ok(AutoCleanupDir { path, ephemeral }) 24 | } 25 | } 26 | 27 | impl Drop for AutoCleanupDir { 28 | fn drop(&mut self) { 29 | if self.ephemeral { 30 | if let Err(err) = fs::remove_dir_all(&self) { 31 | warn!( 32 | ?err, 33 | "failed to remove data directory despite --ephemeral being passed" 34 | ); 35 | } 36 | } 37 | } 38 | } 39 | 40 | impl Deref for AutoCleanupDir { 41 | type Target = PathBuf; 42 | 43 | fn deref(&self) -> &Self::Target { 44 | &self.path 45 | } 46 | } 47 | 48 | impl AsRef for AutoCleanupDir { 49 | fn as_ref(&self) -> &Path { 50 | self.path.as_path() 51 | } 52 | } 53 | 54 | #[test] 55 | fn test_temp_dir() { 56 | let path; 57 | 58 | { 59 | let dir = AutoCleanupDir::new("unit_test", true).unwrap(); 60 | 61 | path = dir.path.clone(); 62 | assert!(dir.path.exists()); 63 | } 64 | 65 | // `dir` gets dropped right at `}` above, so it shouldn't exist anymore on fs as well 66 | assert!(!path.exists()); 67 | } 68 | -------------------------------------------------------------------------------- /am/src/downloader.rs: -------------------------------------------------------------------------------- 1 | use crate::commands::start::CLIENT; 2 | use anyhow::{anyhow, bail, Result}; 3 | use flate2::read::GzDecoder; 4 | use 
indicatif::{MultiProgress, ProgressBar, ProgressState, ProgressStyle}; 5 | use sha2::{Digest, Sha256}; 6 | use std::fmt; 7 | use std::fs::File; 8 | use std::io::{BufWriter, Write}; 9 | use std::path::Path; 10 | use std::time::Duration; 11 | use tracing::{debug, error}; 12 | 13 | /// downloads `package` into `destination`, returning the sha256sum hex-digest of the downloaded file 14 | pub async fn download_github_release( 15 | destination: &File, 16 | org: &str, 17 | repo: &str, 18 | version: &str, 19 | package: &str, 20 | multi_progress: &MultiProgress, 21 | ) -> Result { 22 | let mut hasher = Sha256::new(); 23 | let mut response = CLIENT 24 | .get(format!( 25 | "https://github.com/{org}/{repo}/releases/download/v{version}/{package}" 26 | )) 27 | .send() 28 | .await? 29 | .error_for_status()?; 30 | 31 | let total_size = response 32 | .content_length() 33 | .ok_or_else(|| anyhow!("didn't receive content length"))?; 34 | let mut downloaded = 0; 35 | 36 | let pb = multi_progress.add(ProgressBar::new(total_size)); 37 | 38 | // https://github.com/console-rs/indicatif/blob/HEAD/examples/download.rs#L12 39 | pb.set_style( 40 | ProgressStyle::with_template("{spinner:.green} [{elapsed_precise}] {msg} [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({eta})")? 41 | .with_key("eta", |state: &ProgressState, w: &mut dyn fmt::Write| write!(w, "{:.1}s", state.eta().as_secs_f64()).unwrap()) 42 | .progress_chars("=> ") 43 | ); 44 | 45 | pb.set_message(format!( 46 | "Downloading {package} from github.com/{org}/{repo}" 47 | )); 48 | 49 | let mut buffer = BufWriter::new(destination); 50 | 51 | while let Some(ref chunk) = response.chunk().await? 
{ 52 | buffer.write_all(chunk)?; 53 | hasher.update(chunk); 54 | 55 | let new_size = (downloaded + chunk.len() as u64).min(total_size); 56 | downloaded = new_size; 57 | 58 | pb.set_position(downloaded); 59 | } 60 | 61 | pb.finish_and_clear(); 62 | multi_progress.remove(&pb); 63 | 64 | let checksum = hex::encode(hasher.finalize()); 65 | Ok(checksum) 66 | } 67 | 68 | pub async fn verify_checksum( 69 | sha256sum: &str, 70 | org: &str, 71 | repo: &str, 72 | version: &str, 73 | package: &str, 74 | ) -> Result<()> { 75 | let checksums = CLIENT 76 | .get(format!( 77 | "https://github.com/{org}/{repo}/releases/download/v{version}/sha256sums.txt" 78 | )) 79 | .send() 80 | .await? 81 | .error_for_status()? 82 | .text() 83 | .await?; 84 | 85 | // Go through all the lines in the checksum file and look for the one that 86 | // we need for our current service/version/os/arch. 87 | let expected_checksum = checksums 88 | .lines() 89 | .find_map(|line| match line.split_once(" ") { 90 | Some((checksum, filename)) if package == filename => Some(checksum), 91 | _ => None, 92 | }) 93 | .ok_or_else(|| anyhow!("unable to find checksum for {package} in checksum list"))?; 94 | 95 | if expected_checksum != sha256sum { 96 | error!( 97 | ?expected_checksum, 98 | calculated_checksum = ?sha256sum, 99 | "Calculated checksum for downloaded archive did not match expected checksum", 100 | ); 101 | bail!("checksum did not match"); 102 | } 103 | 104 | Ok(()) 105 | } 106 | 107 | pub async fn unpack( 108 | archive: &File, 109 | package: &str, 110 | destination_path: &Path, 111 | prefix: &str, 112 | multi_progress: &MultiProgress, 113 | ) -> Result<()> { 114 | let tar_file = GzDecoder::new(archive); 115 | let mut ar = tar::Archive::new(tar_file); 116 | 117 | let pb = multi_progress.add(ProgressBar::new_spinner()); 118 | pb.set_style(ProgressStyle::default_spinner()); 119 | pb.enable_steady_tick(Duration::from_millis(120)); 120 | pb.set_message(format!("Unpacking {package}...")); 121 | 122 | for entry in 
/// Prompts the user for a line of text; the answer may not be empty.
pub fn user_input(prompt: impl Into<String>) -> dialoguer::Result<String> {
    Input::with_theme(&SimpleTheme)
        .with_prompt(prompt)
        .interact_text()
}

/// Prompts the user for a line of text; an empty answer yields `None`.
pub fn user_input_optional(prompt: impl Into<String>) -> dialoguer::Result<Option<String>> {
    let input: String = Input::with_theme(&SimpleTheme)
        .with_prompt(prompt)
        .allow_empty(true)
        .interact_text()?;

    Ok(if input.is_empty() { None } else { Some(input) })
}

/// Asks the user a yes/no question and returns their answer.
pub fn confirm(prompt: impl Into<String>) -> dialoguer::Result<bool> {
    Confirm::with_theme(&SimpleTheme)
        .with_prompt(prompt)
        .interact()
}

/// Asks the user a yes/no question that may also be left unanswered
/// (`None`), via dialoguer's `interact_opt`.
pub fn confirm_optional(prompt: impl Into<String>) -> dialoguer::Result<Option<bool>> {
    Confirm::with_theme(&SimpleTheme)
        .with_prompt(prompt)
        .interact_opt()
}
/// A Writer that will output to stderr. It will also suspend any progress bars,
/// so that the output of the progress bar is not mangled.
///
/// The main use case for this is to use it in conjunction with other components
/// that write to stderr, such as the tracing library. If both indicatif and
/// tracing would be using stderr directly, it would result in progress bars
/// being interrupted by other output.
#[derive(Clone)]
pub struct IndicatifWriter {
    // Shared handle to the progress bars that must be suspended around writes.
    multi_progress: indicatif::MultiProgress,
}

impl IndicatifWriter {
    /// Create a new IndicatifWriter. Make sure to use the returned
    /// MultiProgress when creating any progress bars.
    pub fn new() -> (Self, MultiProgress) {
        let multi_progress = MultiProgress::new();
        (
            Self {
                multi_progress: multi_progress.clone(),
            },
            multi_progress,
        )
    }
}

// Every `Write` method is forwarded to stderr inside `suspend`, which clears
// the progress bars, performs the write, then redraws them.
impl Write for IndicatifWriter {
    fn write(&mut self, buf: &[u8]) -> Result<usize> {
        self.multi_progress.suspend(|| stderr().write(buf))
    }

    fn flush(&mut self) -> Result<()> {
        self.multi_progress.suspend(|| stderr().flush())
    }

    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> Result<usize> {
        self.multi_progress
            .suspend(|| stderr().write_vectored(bufs))
    }

    fn write_all(&mut self, buf: &[u8]) -> Result<()> {
        self.multi_progress.suspend(|| stderr().write_all(buf))
    }

    fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> Result<()> {
        self.multi_progress.suspend(|| stderr().write_fmt(fmt))
    }
}

// Lets tracing-subscriber obtain a writer per event; clones share the same
// underlying MultiProgress.
impl<'a> MakeWriter<'a> for IndicatifWriter {
    type Writer = IndicatifWriter;

    fn make_writer(&'a self) -> Self::Writer {
        self.clone()
    }
}
#[tokio::main]
async fn main() {
    let app = Application::parse();

    // Route log output through indicatif so progress bars are not mangled by
    // interleaved tracing output.
    let (writer, multi_progress) = IndicatifWriter::new();

    if let Err(err) = init_logging(&app, writer) {
        eprintln!("Unable to initialize logging: {:#}", err);
        std::process::exit(1);
    }

    // Kick off the background update check unless the user opted out via the
    // AM_NO_UPDATE environment variable (the no-op task keeps the later
    // `timeout(...)` code path uniform).
    let task = if std::env::var_os("AM_NO_UPDATE").is_none() {
        tokio::task::spawn(update::update_check())
    } else {
        tokio::task::spawn(async { /* intentionally left empty */ })
    };

    let config = match load_config(app.config_file.clone()).await {
        Ok(config) => config,
        Err(err) => {
            error!("Unable to load config: {:?}", err);
            std::process::exit(1);
        }
    };

    // `am` exports metrics about itself; failure to set that up is non-fatal.
    if let Err(err) = prometheus_exporter::try_init() {
        warn!(?err, "Unable to initialize prometheus exporter");
    }

    let result = handle_command(app, config, multi_progress).await;

    // Give the background update check at most one second so it cannot delay
    // process shutdown indefinitely.
    if let Err(err) = timeout(Duration::from_secs(1), task).await {
        warn!(?err, "background update check timed out");
    }

    match result {
        Ok(_) => debug!("Command completed successfully"),
        Err(err) => {
            error!("Command failed: {:?}", err);
            std::process::exit(1);
        }
    }
}
/// Initialize logging for the application.
///
/// Currently, we check for the `RUST_LOG` environment variable and use that for
/// logging. If it isn't set or contains an invalid directive, we will show _all_
/// logs from INFO level.
///
/// For example: for local development it is convenient to set the environment
/// variable to `RUST_LOG=am=trace,info`. This will display all log messages
/// within the `am` module, but will only show info for other modules.
fn init_logging(app: &Application, writer: IndicatifWriter) -> Result<()> {
    let (filter_layer, log_layer) = if app.verbose {
        // Verbose mode: honour RUST_LOG when set, otherwise default to debug
        // for the `am` crate and info for everything else.
        let filter_layer = EnvFilter::try_from_default_env()
            .unwrap_or_else(|_| EnvFilter::try_new("am=debug,info").unwrap());

        let log_layer = tracing_subscriber::fmt::layer().with_writer(writer).boxed();

        (filter_layer, log_layer)
    } else {
        let filter_layer = EnvFilter::default().add_directive(LevelFilter::INFO.into());

        // Create a custom field formatter, which only outputs the `message`
        // field, all other fields are ignored.
        let field_formatter = format::debug_fn(|writer, field, value| {
            if field.name() == "message" {
                write!(writer, "{value:?}")
            } else {
                Ok(())
            }
        });

        // Non-verbose mode strips timestamps, levels, span events and targets
        // for a clean, user-facing output.
        let log_layer = tracing_subscriber::fmt::layer()
            .fmt_fields(field_formatter)
            .without_time()
            .with_level(false)
            .with_span_events(format::FmtSpan::NONE)
            .with_target(false)
            .with_writer(writer)
            .boxed();

        (filter_layer, log_layer)
    };

    Registry::default()
        .with(filter_layer)
        .with(log_layer)
        .try_init()
        .context("unable to initialize logger")?;

    Ok(())
}
123 | async fn load_config(config_file: Option) -> Result { 124 | let (path, is_default) = match config_file { 125 | Some(path) => (path, false), 126 | None => (PathBuf::from("./am.toml"), true), 127 | }; 128 | 129 | debug!(?path, "Loading config"); 130 | 131 | match tokio::fs::read_to_string(path).await { 132 | Ok(contents) => { 133 | debug!("Found config file, parsing"); 134 | let config = 135 | toml::from_str(&contents).context("config file contains invalid toml contents")?; 136 | Ok(config) 137 | } 138 | Err(err) => { 139 | if is_default { 140 | debug!(?err, "No config file found, using defaults"); 141 | Ok(AmConfig::default()) 142 | } else { 143 | bail!("Unable to read config file: {}", err); 144 | } 145 | } 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /am/src/server.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result}; 2 | use autometrics::prometheus_exporter; 3 | use axum::body::Body; 4 | use axum::response::Redirect; 5 | use axum::routing::{any, get}; 6 | use axum::{Router, Server}; 7 | use http::header::CONNECTION; 8 | use std::collections::HashMap; 9 | use std::net::SocketAddr; 10 | use std::sync::Arc; 11 | use tokio::sync::watch::Sender; 12 | use tracing::debug; 13 | use url::Url; 14 | 15 | use crate::server::util::proxy_handler; 16 | 17 | mod explorer; 18 | mod functions; 19 | mod prometheus; 20 | mod pushgateway; 21 | mod util; 22 | 23 | pub(crate) async fn start_web_server( 24 | listen_address: &SocketAddr, 25 | enable_prometheus: bool, 26 | enable_pushgateway: bool, 27 | prometheus_proxy_url: Option, 28 | static_assets_url: Url, 29 | tx: Sender>, 30 | tx_url: Sender>, 31 | ) -> Result<()> { 32 | let is_proxying_prometheus = prometheus_proxy_url.is_some(); 33 | let should_enable_prometheus = enable_prometheus && !is_proxying_prometheus; 34 | 35 | let explorer_static_handler = move |mut req: http::Request| async move { 36 | 
*req.uri_mut() = req 37 | .uri() 38 | .path_and_query() 39 | .unwrap() 40 | .as_str() 41 | .replace("/explorer/static", "/static") 42 | .parse() 43 | .unwrap(); 44 | // Remove the connection header to prevent issues when proxying content from HTTP/2 upstreams 45 | req.headers_mut().remove(CONNECTION); 46 | proxy_handler(req, static_assets_url.clone()).await 47 | }; 48 | let mut app = Router::new() 49 | // Any calls to the root should be redirected to the explorer which is most likely what the user wants to use. 50 | .route("/", get(|| async { Redirect::temporary("/explorer/") })) 51 | .route( 52 | "/explorer", 53 | get(|| async { Redirect::permanent("/explorer/") }), 54 | ) 55 | .route( 56 | "/graph", 57 | get(|req: http::Request| async move { 58 | let query = req.uri().query().unwrap_or_default(); 59 | Redirect::temporary(&format!("/explorer/graph.html?{query}")) 60 | }), 61 | ) 62 | .route("/explorer/", get(explorer::handler)) 63 | .route("/explorer/static/*path", get(explorer_static_handler)) 64 | .route("/explorer/*path", get(explorer::handler)) 65 | .route("/api/functions", get(functions::all_functions)) 66 | .route( 67 | "/self_metrics", 68 | get(|| async { prometheus_exporter::encode_http_response() }), 69 | ); 70 | 71 | // Proxy `/prometheus` to the upstream (local) prometheus instance 72 | if should_enable_prometheus { 73 | app = app 74 | .route("/prometheus/*path", any(prometheus::handler)) 75 | .route("/prometheus", any(prometheus::handler)); 76 | } 77 | 78 | // NOTE - this will override local prometheus routes if specified 79 | if is_proxying_prometheus { 80 | let prometheus_upstream_base = Arc::new(prometheus_proxy_url.clone().unwrap()); 81 | 82 | // Define a handler that will proxy to an external Prometheus instance 83 | let handler = move |mut req: http::Request| { 84 | let upstream_base = prometheus_upstream_base.clone(); 85 | // 1. 
Get the path and query from the request, since we need to strip out `/prometheus` 86 | let path_and_query = req 87 | .uri() 88 | .path_and_query() 89 | .map(|pq| pq.as_str()) 90 | .unwrap_or(""); 91 | if let Some(stripped_path) = path_and_query.strip_prefix("/prometheus") { 92 | let stripped_path_str = stripped_path.to_string(); 93 | // 2. Remove the `/prometheus` prefix. 94 | let new_path_and_query = 95 | http::uri::PathAndQuery::from_maybe_shared(stripped_path_str) 96 | .expect("Invalid path"); 97 | 98 | // 3. Create a new URI with the modified path. 99 | let mut new_uri_parts = req.uri().clone().into_parts(); 100 | new_uri_parts.path_and_query = Some(new_path_and_query); 101 | 102 | let new_uri = http::Uri::from_parts(new_uri_parts).expect("Invalid URI"); 103 | 104 | // 4. Replace the request's URI with the modified URI. 105 | *req.uri_mut() = new_uri; 106 | } 107 | async move { prometheus::handler_with_url(req, &upstream_base).await } 108 | }; 109 | 110 | app = app 111 | .route("/prometheus/*path", any(handler.clone())) 112 | .route("/prometheus", any(handler)); 113 | } 114 | 115 | if enable_pushgateway { 116 | app = app 117 | .route("/metrics", any(pushgateway::metrics_proxy_handler)) 118 | .route("/pushgateway/*path", any(pushgateway::handler)) 119 | .route("/pushgateway", any(pushgateway::handler)); 120 | } 121 | 122 | let server = Server::try_bind(listen_address) 123 | .with_context(|| format!("failed to bind to {}", listen_address))? 
124 | .serve(app.into_make_service()); 125 | 126 | tx.send_replace(Some(server.local_addr())); 127 | 128 | debug!("Web server listening on {}", server.local_addr()); 129 | 130 | let mut urls = HashMap::from([("Explorer", format!("http://{}", server.local_addr()))]); 131 | 132 | if should_enable_prometheus { 133 | urls.insert("Prometheus", "http://127.0.0.1:9090/prometheus".to_string()); 134 | } 135 | 136 | if is_proxying_prometheus { 137 | urls.insert( 138 | "Prometheus Proxy Destination", 139 | prometheus_proxy_url.unwrap().to_string(), 140 | ); 141 | } 142 | 143 | if enable_pushgateway { 144 | urls.insert( 145 | "Pushgateway", 146 | "http://127.0.0.1:9091/pushgateway".to_string(), 147 | ); 148 | } 149 | 150 | tx_url.send_replace(urls); 151 | server.await?; 152 | 153 | Ok(()) 154 | } 155 | -------------------------------------------------------------------------------- /am/src/server/explorer.rs: -------------------------------------------------------------------------------- 1 | use autometrics::autometrics; 2 | use axum::body; 3 | use axum::extract::Path; 4 | use axum::response::{IntoResponse, Response}; 5 | use http::StatusCode; 6 | use include_dir::{include_dir, Dir}; 7 | use tracing::{error, trace, warn}; 8 | 9 | static STATIC_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/../files/explorer"); 10 | 11 | #[autometrics] 12 | pub(crate) async fn handler(optional_path: Option>) -> impl IntoResponse { 13 | let path = optional_path.map_or_else(|| "index.html".to_string(), |path| path.0); 14 | 15 | trace!(?path, "Serving static file"); 16 | 17 | match STATIC_DIR.get_file(&path) { 18 | None => { 19 | warn!(?path, "Request file was not found in the explorer assets"); 20 | StatusCode::NOT_FOUND.into_response() 21 | } 22 | Some(file) => Response::builder() 23 | .status(StatusCode::OK) 24 | .body(body::boxed(body::Full::from(file.contents()))) 25 | .map(|res| res.into_response()) 26 | .unwrap_or_else(|err| { 27 | error!("Failed to build response: {}", err); 28 | 
StatusCode::INTERNAL_SERVER_ERROR.into_response() 29 | }), 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /am/src/server/functions.rs: -------------------------------------------------------------------------------- 1 | use autometrics::autometrics; 2 | use axum::response::{IntoResponse, Response}; 3 | use axum::Json; 4 | use http::StatusCode; 5 | use serde::{Deserialize, Serialize}; 6 | use thiserror::Error; 7 | 8 | #[autometrics] 9 | pub(crate) async fn all_functions() -> Result { 10 | let functions = am_list::list_all_project_functions( 11 | std::env::current_dir() 12 | .map_err(|_| AllFunctionError::DirNotFound)? 13 | .as_path(), 14 | ) 15 | .map_err(|err| AllFunctionError::AmListError(format!("{err:?}")))?; 16 | 17 | let mut output = vec![]; 18 | 19 | for (path, (language, language_functions)) in functions { 20 | for func in language_functions { 21 | let mut value = 22 | serde_json::to_value(&func).map_err(|_| AllFunctionError::SerdeError)?; 23 | 24 | // this is in a separate block so the mutable reference gets dropped before we try to move the value in the last line 25 | { 26 | let obj = value.as_object_mut().ok_or(AllFunctionError::NonObject)?; 27 | 28 | obj.insert( 29 | "language".to_string(), 30 | serde_json::to_value(language).map_err(|_| AllFunctionError::SerdeError)?, 31 | ); 32 | 33 | obj.insert( 34 | "path".to_string(), 35 | serde_json::to_value(path.to_string_lossy()) 36 | .map_err(|_| AllFunctionError::SerdeError)?, 37 | ); 38 | } 39 | 40 | output.push(value); 41 | } 42 | } 43 | 44 | Ok(Json(output)) 45 | } 46 | 47 | #[derive(Deserialize, Serialize, Debug, Error)] 48 | #[serde(tag = "error", content = "details", rename_all = "snake_case")] 49 | pub(crate) enum AllFunctionError { 50 | #[error("`FunctionInfo` needs to serialize to a `Value::Object`")] 51 | NonObject, 52 | 53 | #[error("unable to determinate current working directory")] 54 | DirNotFound, 55 | 56 | #[error("{0}")] 57 | 
AmListError(String), 58 | 59 | #[error("serde error")] 60 | SerdeError, 61 | } 62 | 63 | impl IntoResponse for AllFunctionError { 64 | fn into_response(self) -> Response { 65 | (StatusCode::INTERNAL_SERVER_ERROR, Json(self)).into_response() 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /am/src/server/prometheus.rs: -------------------------------------------------------------------------------- 1 | use crate::server::util::proxy_handler; 2 | use autometrics::autometrics; 3 | use axum::body::Body; 4 | use axum::response::IntoResponse; 5 | use url::Url; 6 | 7 | #[autometrics] 8 | pub(crate) async fn handler(req: http::Request) -> impl IntoResponse { 9 | let upstream_base = url::Url::parse("http://localhost:9090").unwrap(); 10 | proxy_handler(req, upstream_base).await 11 | } 12 | 13 | pub(crate) async fn handler_with_url( 14 | req: http::Request, 15 | upstream_base: &Url, 16 | ) -> impl IntoResponse { 17 | proxy_handler(req, upstream_base.clone()).await 18 | } 19 | -------------------------------------------------------------------------------- /am/src/server/pushgateway.rs: -------------------------------------------------------------------------------- 1 | use crate::server::util::proxy_handler; 2 | use autometrics::autometrics; 3 | use axum::body::Body; 4 | use axum::response::IntoResponse; 5 | use url::Url; 6 | 7 | #[autometrics] 8 | pub(crate) async fn handler(req: http::Request) -> impl IntoResponse { 9 | let upstream_base = Url::parse("http://localhost:9091").unwrap(); 10 | proxy_handler(req, upstream_base).await 11 | } 12 | 13 | #[autometrics] 14 | pub(crate) async fn metrics_proxy_handler(req: http::Request) -> impl IntoResponse { 15 | let upstream_base = Url::parse("http://localhost:9091/pushgateway/metrics").unwrap(); 16 | proxy_handler(req, upstream_base).await 17 | } 18 | -------------------------------------------------------------------------------- /am/src/server/util.rs: 
/// Forwards an incoming request to `upstream_base`, preserving the request's
/// path and query string, and converts the upstream reply back into an axum
/// response.
///
/// The outcome is logged at a severity matching the upstream status code
/// (server error -> warn, client error -> debug, success -> trace). If the
/// upstream cannot be reached at all, a plain 500 is returned.
pub(crate) async fn proxy_handler(
    mut req: http::Request<Body>,
    upstream_base: Url,
) -> impl IntoResponse {
    // Capture these up front for logging; the URI is rewritten below.
    let req_uri = req.uri().to_string();
    let method = req.method().to_string();

    trace!(req_uri=%req_uri, method=%method, "Proxying request");

    // NOTE: The username/password is not forwarded
    // NOTE(review): the `.unwrap()`s assume the incoming path/query always
    // joins into a valid URL — confirm this cannot be hit by a malformed
    // request.
    let mut url = upstream_base.join(req.uri().path()).unwrap();
    url.set_query(req.uri().query());
    *req.uri_mut() = Uri::try_from(url.as_str()).unwrap();

    let res = CLIENT.execute(req.try_into().unwrap()).await;

    match res {
        Ok(res) => {
            if res.status().is_server_error() {
                warn!(
                    method=%method,
                    req_uri=%req_uri,
                    upstream_uri=%res.url(),
                    status_code=%res.status(),
                    "Response from the upstream source returned a server error status code",
                );
            } else if res.status().is_client_error() {
                debug!(
                    method=%method,
                    req_uri=%req_uri,
                    upstream_uri=%res.url(),
                    status_code=%res.status(),
                    "Response from the upstream source returned a client error status code",
                );
            } else {
                trace!(
                    method=%method,
                    req_uri=%req_uri,
                    upstream_uri=%res.url(),
                    status_code=%res.status(),
                    "Response from the upstream source",
                );
            }

            convert_response(res).into_response()
        }
        Err(err) => {
            warn!(
                method=%method,
                req_uri=%req_uri,
                err=%err,
                "Unable to proxy request to upstream server",
            );
            StatusCode::INTERNAL_SERVER_ERROR.into_response()
        }
    }
}
68 | /// 69 | /// If the Response builder is unable to create a Response, then it will log the 70 | /// error and return a http status code 500. 71 | /// 72 | /// We cannot implement this as an Into or From trait since both types are 73 | /// foreign to this code. 74 | pub(crate) fn convert_response(req: reqwest::Response) -> Response { 75 | let mut builder = http::Response::builder().status(req.status()); 76 | 77 | // Calling `headers_mut` is safe here because we're constructing a new 78 | // Response from scratch and it will only return `None` if the builder is in 79 | // a Error state. 80 | let headers = builder.headers_mut().unwrap(); 81 | for (name, value) in req.headers() { 82 | // Insert all the headers that were in the response from the upstream. 83 | headers.insert(name, value.clone()); 84 | } 85 | 86 | // TODO: Do we need to rewrite some headers, such as host? 87 | 88 | match builder.body(body::StreamBody::from(req.bytes_stream())) { 89 | Ok(res) => res.into_response(), 90 | Err(err) => { 91 | error!("Error converting response: {:?}", err); 92 | StatusCode::INTERNAL_SERVER_ERROR.into_response() 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /am/src/terminal.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use itertools::Itertools; 3 | use std::collections::HashMap; 4 | use std::io::Write; 5 | use std::time::Duration; 6 | use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; 7 | use tokio::sync::watch::Receiver; 8 | use tracing::info; 9 | 10 | pub(crate) fn wait_and_print_urls(mut rx: Receiver>) { 11 | tokio::spawn(async move { 12 | // wait a second until all other log messages (invoked in belows `select!`) are printed 13 | // Prometheus and Pushgateway usually dont take longer than a second to start so this should be good 14 | tokio::time::sleep(Duration::from_secs(1)).await; 15 | 16 | match rx.wait_for(|map| 
!map.is_empty()).await {
            Ok(map) => {
                // Printing is best-effort; ignore terminal write errors.
                let _ = print_urls(&map);
            }
            Err(err) => {
                // The sender side was dropped before any URL was published.
                info!(?err, "failed to wait for urls");
            }
        }
    });
}

/// Pretty-print the map of service names to URLs, preceded by an `am`
/// version banner and a shutdown hint. Entries are printed sorted by name.
pub(crate) fn print_urls(map: &HashMap<&str, String>) -> Result<()> {
    // Column width: longest service name plus padding between the columns.
    let length = map
        .iter()
        .map(|(name, _)| name.len() + 5)
        .max()
        .unwrap_or(0);

    let mut stdout = StandardStream::stdout(ColorChoice::Always);

    // Bold magenta "am" banner, followed by the crate version in plain magenta.
    stdout.set_color(ColorSpec::new().set_fg(Some(Color::Magenta)).set_bold(true))?;
    write!(stdout, "\n am ")?;

    stdout.set_color(
        ColorSpec::new()
            .set_fg(Some(Color::Magenta))
            .set_bold(false),
    )?;
    write!(stdout, "v{}", env!("CARGO_PKG_VERSION"))?;

    stdout.set_color(ColorSpec::new().set_fg(Some(Color::White)))?;
    writeln!(stdout, " press ctrl + c to shutdown\n")?;

    // One line per service: bold padded name, then the URL in regular weight.
    for (name, url) in map.iter().sorted_by(|(a, _), (b, _)| a.cmp(b)) {
        stdout.set_color(ColorSpec::new().set_fg(Some(Color::White)).set_bold(true))?;
        write!(stdout, " {:width$}", name, width = length)?;

        stdout.set_color(ColorSpec::new().set_fg(Some(Color::White)).set_bold(false))?;
        writeln!(stdout, " {}", url)?;
    }

    writeln!(stdout)?;
    Ok(())
}
--------------------------------------------------------------------------------
/am_list/.gitignore:
--------------------------------------------------------------------------------
/target
--------------------------------------------------------------------------------
/am_list/CHANGELOG.md:
--------------------------------------------------------------------------------
# Changelog

All notable changes of `am_list` up to 0.3.0 will be documented in this file.
After `0.3.0`, [`am_list`](https://github.com/autometrics-dev/am_list) is
included in [`am`](https://github.com/autometrics-dev/am), and the relevant
changes for `am_list` will be included in `am` README.
7 | 8 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 9 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 10 | 11 | 12 | ## [Version 0.3.0] - 2023-08-29 13 | 14 | ### Changed 15 | 16 | - Renamed `ExpectedAmLabel` to `FunctionInfo` 17 | - `ExpectedAmLabel.module` and `ExpectedAmLabel.function` are now nested in 18 | a new `FunctionId` structure: `FunctionInfo.id.(module|function)`. 19 | 20 | ### Added 21 | 22 | - Added locus information in the `FunctionInfo` struct. 23 | + `list --all` now includes the instrumentation location information when 24 | available in each returned `FunctionInfo` structure 25 | 26 | ## [Version 0.2.7] - 2023-08-16 27 | 28 | ### Changed 29 | 30 | - [Typescript] `am_list` will now skip all `node_modules` directory for 31 | Typescript. 32 | - [Typescript] Include Javascript files in the search 33 | 34 | ### Fixed 35 | 36 | - [Typescript] Fix detection of `@autometrics/autometrics` imports 37 | 38 | ## [Version 0.2.6] - 2023-07-27 39 | 40 | ### Added 41 | 42 | - [Python] Support for python language 43 | 44 | ## [Version 0.2.5] - 2023-07-19 45 | 46 | ### Added 47 | 48 | - [All] The `ExpectedAmLabel` structure now implements `serde::Deserialize` 49 | 50 | ## [Version 0.2.4] - 2023-07-06 51 | 52 | ### Fixed 53 | 54 | - [Go] The `list` subcommand now can also list all functions in a 55 | project. 56 | 57 | ## [Version 0.2.3] - 2023-07-04 58 | 59 | ### Added 60 | 61 | - [Typescript] Support for typescript language 62 | 63 | ## [Version 0.2.2] - 2023-06-19 64 | 65 | ### Added 66 | 67 | - [Rust] The `list` subcommand now takes an optional `--all-functions` (short `-a`) flag, 68 | which lists all functions/methods in the project instead of only the ones with the 69 | autometrics annotation. This allows to get an overview of how many functions are 70 | autometricized. The flag will crash on non-Rust implementations for now. 
## [Version 0.2.1] - 2023-06-16

### Fixed

- [Rust] The struct name is now part of the module path for detected methods
- [Rust] Modules defined within a source file are properly detected, and part
  of the module path for detected methods

## [Version 0.2.0] – 2023-06-07

### Added

### Changed

- The command to list all the function names is now a subcommand called 'list'. The
  change is done to accommodate different subcommands in the future.
- The output of the `list` command is now in JSON, to ease consumption for other
  programs

### Deprecated

### Removed

### Fixed

### Security

## [Version 0.1.0] – 2023-05-29

### Added

- Support for parsing Rust and Go projects

### Changed

### Deprecated

### Removed

### Fixed

### Security

[Version 0.3.0]: https://github.com/autometrics-dev/am_list/compare/v0.2.7...v0.3.0
[Version 0.2.7]: https://github.com/autometrics-dev/am_list/compare/v0.2.6...v0.2.7
[Version 0.2.6]: https://github.com/autometrics-dev/am_list/compare/v0.2.5...v0.2.6
[Version 0.2.5]: https://github.com/autometrics-dev/am_list/compare/v0.2.4...v0.2.5
[Version 0.2.4]: https://github.com/autometrics-dev/am_list/compare/v0.2.3...v0.2.4
[Version 0.2.3]: https://github.com/autometrics-dev/am_list/compare/v0.2.2...v0.2.3
[Version 0.2.2]: https://github.com/autometrics-dev/am_list/compare/v0.2.1...v0.2.2
[Version 0.2.1]: https://github.com/autometrics-dev/am_list/compare/v0.2.0...v0.2.1
[Version 0.2.0]: https://github.com/autometrics-dev/am_list/compare/v0.1.0...v0.2.0
[Version 0.1.0]: https://github.com/autometrics-dev/am_list/releases/tag/v0.1.0
--------------------------------------------------------------------------------
/am_list/CONTRIBUTING.md:
-------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Implementing language support 4 | 5 | To add `am_list` support for a new language, you mostly need to know the usage 6 | patterns of autometrics in this language, and understand how to make and use 7 | tree-sitter queries. 8 | 9 | ### Function detection architecture 10 | 11 | The existing implementations can also be used as a stencil to add a new 12 | language. They all follow the same pattern: 13 | - a `src/{lang}.rs` which implements walking down a directory and collecting the 14 | `ExpectedAmLabel` labels on the files it traverses. This is the location where 15 | the `crate::ListAmFunctions` trait is implemented. 16 | - a `src/{lang}/queries.rs` which is a wrapper around a tree-sitter query, using 17 | the rust bindings. 18 | - a `runtime/queries/{lang}` folder which contains the actual queries being 19 | passed to tree-sitter: 20 | + the `.scm` files are direct queries, ready to be used "as-is", 21 | + the `.scm.tpl` files are format strings, which are meant to be templated at 22 | runtime according to other detection happening in the file. For example, 23 | when autometrics is a wrapper function that can be renamed in a file 24 | (`import { autometrics as am } from '@autometrics/autometrics'` in 25 | typescript), we need to change the exact query to detect wrapper usage in 26 | the file. 27 | 28 | ### Building queries 29 | 30 | The hardest part about supporting a language is to build the queries for it. The 31 | best resources to create those are on the main website of tree-sitter library: 32 | 33 | - the help section on [Query 34 | syntax](https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax) 35 | helps understand how to create queries, and 36 | - the [playground](https://tree-sitter.github.io/tree-sitter/playground) allows 37 | to paste some text and see both the syntax tree and the current highlight from 38 | a query. 
This enables creating queries incrementally.

![Screenshot of the Tree-sitter playground on their website](./assets/contributing/ts-playground.png)

If you're using Neovim, it also has a very good [playground
plugin](https://github.com/nvim-treesitter/playground) you can use.

The goal is to test the query on the common usage patterns of autometrics, and
make sure that you can match the function names. There are non-trivial
limitations on what a tree-sitter query can and cannot match, so if you're
blocked, don't hesitate to create a discussion or an issue to ask a question,
and we will try to solve the problem together.

### Using queries

Once you have your query, it's easier to wrap it in a Rust structure to keep
track of the indices of the "captures" created within the query. It is best to
look at how the queries are used in the existing implementations and try to
replicate this, seeing how the bindings are used. In order of complexity, you
should look at:
- the [Go](./src/go/queries.rs) implementation, which has a very simple query.
- the [Typescript](./src/typescript/queries.rs) implementation, which uses the
  result of a first query to dynamically create a second one, and collect the
  results of everything.
- the [Rust](./src/rust/queries.rs) implementation, which uses dynamically
  created queries, but also uses a recursion pattern to keep track of modules
  declared in-file.
65 | -------------------------------------------------------------------------------- /am_list/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "am_list" 3 | description = "A tree-sitter-powered command line tool to list functions marked with Autometrics annotations" 4 | version.workspace = true 5 | edition.workspace = true 6 | authors.workspace = true 7 | documentation.workspace = true 8 | repository.workspace = true 9 | homepage.workspace = true 10 | license.workspace = true 11 | 12 | [dependencies] 13 | anyhow = "1.0.71" 14 | crop = "0.4.0" 15 | ignore = "0.4.20" 16 | itertools = "0.11.0" 17 | log = "0.4.18" 18 | rayon = "1.7.0" 19 | serde = { version = "1.0.163", features = ["derive"] } 20 | serde_json = "1.0.96" 21 | thiserror = "1.0.40" 22 | tree-sitter = "0.20.10" 23 | tree-sitter-go = "0.19.1" 24 | tree-sitter-python = "0.20.2" 25 | tree-sitter-rust = "0.20.3" 26 | tree-sitter-typescript = "0.20.2" 27 | walkdir = "2.3.3" 28 | 29 | [dev-dependencies] 30 | pretty_assertions = "1.3.0" 31 | -------------------------------------------------------------------------------- /am_list/README.md: -------------------------------------------------------------------------------- 1 | # Autometrics List 2 | 3 | A command that lists all functions that have the "autometrics" annotation. 4 | 5 | The aim is to use this binary as a quick static analyzer that returns from a 6 | codebase the complete list of functions that are annotated to be 7 | autometricized. 8 | 9 | The analysis is powered by [Tree-sitter](https://tree-sitter.github.io), and 10 | all the specific logic is contained in [Tree-sitter queries](./runtime/queries) 11 | that are specific for each language implementation. 
12 | 13 | ## Quickstart 14 | 15 | Use the installer script to pull the latest version directly from Github 16 | (change `VERSION` accordingly): 17 | 18 | ```console 19 | VERSION=0.2.0 curl --proto '=https' --tlsv1.2 -LsSf https://github.com/autometrics-dev/am_list/releases/download/v$VERSION/am_list-installer.sh | sh 20 | ``` 21 | 22 | And run the binary 23 | 24 | ```bash 25 | # Make sure that `~/.cargo/bin` is in your `PATH` 26 | am_list list -l rs /path/to/project/root 27 | ``` 28 | 29 | ## Current state and known issues 30 | 31 | ### Language support table 32 | 33 | In the following table, having the "detection" feature means that `am_list` 34 | returns the exact same labels as the ones you would need to use in PromQL to 35 | look at the metrics. In a nutshell, 36 | "[Autometrics](https://github.com/autometrics-dev) compliance". 37 | 38 | | Language | Function name detection | Module detection | 39 | | :-------------------------------------------------------------: | :---------------------: | :--------------: | 40 | | [Rust](https://github.com/autometrics-dev/autometrics-rs) | ✅ | ✅ | 41 | | [Typescript](https://github.com/autometrics-dev/autometrics-ts) | ✅ | ⚠️[^wrapper] | 42 | | [Go](https://github.com/autometrics-dev/autometrics-go) | ⚠️[^all-functions] | ✅ | 43 | | [Python](https://github.com/autometrics-dev/autometrics-py) | ✅ | ✅ | 44 | | [C#](https://github.com/autometrics-dev/autometrics-cs) | ❌ | ❌ | 45 | 46 | [^wrapper]: 47 | For Typescript (and all languages where autometrics is a wrapper 48 | function), static analysis makes it hard to traverse imports to find the 49 | module where an instrumented function is _defined_, so the reported module 50 | is the module where the function has been _instrumented_ 51 | 52 | [^all-functions]: 53 | Support list all autometricized functions, but not all 54 | functions without restriction 55 | 56 | ### Typescript 57 | 58 | #### Module tracking 59 | 60 | This tool cannot track modules "accurately" (meaning "the 
module label is
exactly what autometrics will report"), because autometrics-ts uses the path of
the source in the JS-compiled bundle to report the module. The compilation and
bundling happens after `am_list` looks at the code, so it cannot be accurate.

This means the module reporting for typescript is bound to be a "best effort"
attempt to be useful.

The other difficulty encountered when using a static analysis tool with
autometrics-ts is that the instrumentation can happen anywhere, as the wrapper
function call can use an imported symbol as its argument:

```typescript
import { exec } from "child_process";
import { autometrics } from "@autometrics/autometrics";

const instrumentedExec = autometrics(exec);

// use instrumentedExec everywhere instead of exec
```

In order to report the locus of _function definition_ as the module, we would
need to include both:

- a complete import resolution step, to figure out the origin module of the
  instrumented function (`child_process` in the example), and
- a dependency inspection step, to figure out the path to the instrumented
  function definition _within_ the dependency (`lib/child_process.js` in the
  [node source code](https://github.com/nodejs/node/blob/main/lib/child_process.js))

It is impractical and error-prone to implement these steps accurately, so
instead we only try to detect imports when they are explicitly imported in the
same file, and we will only report the function module as the imported module
(not the path to the file it is defined in).
Practically that means that for 93 | this example: 94 | 95 | ```typescript 96 | // in src/router/index.ts 97 | import { exec } from "child_process"; 98 | import { origRoute as myRoute } from "../handlers"; 99 | import { autometrics } from "@autometrics/autometrics"; 100 | 101 | const instrumentedExec = autometrics(exec); 102 | const instrumentedRoute = autometrics(myRoute); 103 | 104 | // use instrumentedExec everywhere instead of exec 105 | ``` 106 | 107 | `am_list` will report 2 functions: 108 | 109 | - `{"function": "exec", "module": "ext://child_process"}`: using `ext://` 110 | protocol to say the module is non-local 111 | - `{"function": "origRoute", "module": "handlers"}`: even if `myRoute` is 112 | re-exported from `../handlers/my/routes/index.ts`, we do not go look into what 113 | `handlers` did to expose `origRoute`; also, the alias is resolved. 114 | -------------------------------------------------------------------------------- /am_list/assets/contributing/ts-playground.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/autometrics-dev/am/6b9b57f21692f7312fff9a57e26410a8534238a4/am_list/assets/contributing/ts-playground.png -------------------------------------------------------------------------------- /am_list/runtime/queries/README.md: -------------------------------------------------------------------------------- 1 | # Queries 2 | 3 | This folder contains files with all the queries that get eventually compiled in 4 | the binary for ease of use. 5 | 6 | 7 | Queries are expected to make captures named `@func.name` for each function name 8 | that is actually autometricized. 
9 | -------------------------------------------------------------------------------- /am_list/runtime/queries/go/all_functions.scm: -------------------------------------------------------------------------------- 1 | ((package_clause 2 | (package_identifier) @pack.name) 3 | 4 | (function_declaration 5 | name: (identifier) @func.name)) 6 | 7 | ((package_clause 8 | (package_identifier) @pack.name) 9 | 10 | (method_declaration 11 | receiver: (parameter_list 12 | (parameter_declaration 13 | type: (type_identifier) @type.name)) 14 | name: (field_identifier) @func.name)) 15 | 16 | ((package_clause 17 | (package_identifier) @pack.name) 18 | 19 | (method_declaration 20 | receiver: (parameter_list 21 | (parameter_declaration 22 | type: (pointer_type (type_identifier) @type.name))) 23 | name: (field_identifier) @func.name)) 24 | -------------------------------------------------------------------------------- /am_list/runtime/queries/go/autometrics.scm: -------------------------------------------------------------------------------- 1 | ((package_clause 2 | (package_identifier) @pack.name) 3 | 4 | (comment) @dir.comment 5 | . 6 | (comment)* 7 | . 8 | (function_declaration 9 | name: (identifier) @func.name) 10 | (#match? @dir.comment "^//autometrics:(inst|doc)")) 11 | 12 | 13 | 14 | ((package_clause 15 | (package_identifier) @pack.name) 16 | 17 | (comment) @dir.comment 18 | . 19 | (comment)* 20 | . 21 | (method_declaration 22 | receiver: (parameter_list 23 | (parameter_declaration 24 | type: (type_identifier) @type.name)) 25 | name: (field_identifier) @func.name) 26 | (#match? @dir.comment "^//autometrics:(inst|doc)")) 27 | 28 | ((package_clause 29 | (package_identifier) @pack.name) 30 | 31 | (comment) @dir.comment 32 | . 33 | (comment)* 34 | . 35 | (method_declaration 36 | receiver: (parameter_list 37 | (parameter_declaration 38 | type: (pointer_type (type_identifier) @type.name))) 39 | name: (field_identifier) @func.name) 40 | (#match? 
@dir.comment "^//autometrics:(inst|doc)")) 41 | -------------------------------------------------------------------------------- /am_list/runtime/queries/python/all_functions.scm: -------------------------------------------------------------------------------- 1 | (function_definition 2 | name: (identifier) @func.name) 3 | -------------------------------------------------------------------------------- /am_list/runtime/queries/python/autometrics.scm.tpl: -------------------------------------------------------------------------------- 1 | (decorated_definition 2 | (decorator 3 | [(identifier) @decorator.name 4 | (call (identifier) @decorator.name)]) 5 | (#eq? @decorator.name "{0}") 6 | definition: (function_definition 7 | name: (identifier) @func.name)) 8 | -------------------------------------------------------------------------------- /am_list/runtime/queries/python/import.scm: -------------------------------------------------------------------------------- 1 | (import_from_statement 2 | module_name: (dotted_name (identifier) @import.module) 3 | (#eq? @import.module "autometrics") 4 | name:[ 5 | (dotted_name (identifier) @import.name) 6 | (aliased_import 7 | name: (dotted_name (identifier) @import.name) 8 | alias: (identifier) @import.alias)] 9 | (#eq? @import.name "autometrics")) 10 | -------------------------------------------------------------------------------- /am_list/runtime/queries/rust/all_functions.scm: -------------------------------------------------------------------------------- 1 | ((function_item 2 | name: (identifier) @func.name)) 3 | 4 | ;; It is impossible to do arbitrary levels of nesting, so we just detect module declarations to 5 | ;; call this query recursively on the declaration_list of the module. 
6 | ;; Ref: https://github.com/tree-sitter/tree-sitter/discussions/981 7 | ((mod_item 8 | name: (identifier) @mod.name 9 | body: (declaration_list) @mod.contents)) 10 | 11 | ;; We want to skip the "bare" function detection (@func.name pattern in this file) when function 12 | ;; is within an impl block. The reason is that we cannot properly report the module name (which should 13 | ;; be the struct type name) if we use this detection method. 14 | ;; Therefore, we skip bare functions that are detected within impl blocks, and instead rely on 15 | ;; recursion to find functions within impl blocks. 16 | ;; 17 | ;; We also consider that an "impl block" is an "in-file" module for the purpose of recursion 18 | ;; This allows to detect functions that have the autometrics annotation within an impl block, 19 | ;; _while allowing to skip functions in impl blocks detected by the main query_. 20 | ((impl_item 21 | type: (type_identifier) @impl.type 22 | body: (declaration_list) @impl.contents)) 23 | -------------------------------------------------------------------------------- /am_list/runtime/queries/rust/am_struct.scm.tpl: -------------------------------------------------------------------------------- 1 | ((impl_item 2 | type: (type_identifier) @type.impl 3 | body: (declaration_list 4 | (function_item 5 | name: (identifier) @func.name))) 6 | (#match? @type.impl "({}){{1,1}}")) 7 | -------------------------------------------------------------------------------- /am_list/runtime/queries/rust/autometrics.scm: -------------------------------------------------------------------------------- 1 | ((attribute_item)* 2 | . 3 | (attribute_item 4 | (attribute 5 | (identifier) @attr)) 6 | . 7 | (attribute_item)* 8 | . 9 | (function_item 10 | name: (identifier) @func.name) 11 | (#eq? @attr "autometrics")) 12 | 13 | ((attribute_item)* 14 | . 15 | (attribute_item 16 | (attribute 17 | (identifier) @attr)) 18 | . 19 | (attribute_item)* 20 | . 
21 | (impl_item 22 | type: (type_identifier) @type.impl 23 | body: (declaration_list 24 | (function_item 25 | name: (identifier) @inner.func.name))) 26 | 27 | (#eq? @attr "autometrics")) 28 | 29 | ;; It is impossible to do arbitrary levels of nesting, so we just detect module declarations to 30 | ;; call this query recursively on the declaration_list of the module. 31 | ;; Ref: https://github.com/tree-sitter/tree-sitter/discussions/981 32 | ((mod_item 33 | name: (identifier) @mod.name 34 | body: (declaration_list) @mod.contents)) 35 | 36 | ;; We want to skip the "bare" function detection (@func.name pattern in this file) when function 37 | ;; is within an impl block. The reason is that we cannot properly report the module name (which should 38 | ;; be the struct type name) if we use this detection method. 39 | ;; Therefore, we skip bare functions that are detected within impl blocks, and instead rely on 40 | ;; recursion to find functions within impl blocks. 41 | ;; 42 | ;; We also consider that an "impl block" is an "in-file" module for the purpose of recursion 43 | ;; This allows to detect functions that have the autometrics annotation within an impl block, 44 | ;; _while allowing to skip functions in impl blocks detected by the main query_. 
45 | ((impl_item 46 | type: (type_identifier) @impl.type 47 | body: (declaration_list) @impl.contents)) 48 | -------------------------------------------------------------------------------- /am_list/runtime/queries/typescript/all_functions.scm: -------------------------------------------------------------------------------- 1 | (function_declaration 2 | name: (identifier) @func.name) 3 | 4 | (function 5 | name: (identifier) @func.name) 6 | 7 | (lexical_declaration 8 | (variable_declarator 9 | name: (identifier) @func.name 10 | value: (arrow_function) @func.value)) 11 | 12 | (lexical_declaration 13 | (variable_declarator 14 | name: (identifier) @func.name 15 | value: (function) @func.value)) 16 | 17 | (class_declaration 18 | name: (type_identifier) @type.name 19 | body: (class_body 20 | [(method_signature 21 | name: (property_identifier) @method.name) 22 | (method_definition 23 | name: (property_identifier) @method.name)])) 24 | -------------------------------------------------------------------------------- /am_list/runtime/queries/typescript/autometrics.scm: -------------------------------------------------------------------------------- 1 | ;; TODO: this doesn't work as captures aren't shared between patterns. 2 | ;; This means we can't use @wrapper.atname in an #eq expression in the (call_expression) pattern afterwards 3 | ;; A recursion algorithm that uses a templated query might be the solution 4 | ((import_statement 5 | (import_clause 6 | (named_imports 7 | (import_specifier 8 | . 9 | name: (identifier) @wrapperdirect.name 10 | .))) 11 | source: (string (string_fragment) @lib.atname)) 12 | (#match? @lib.atname "@autometrics\/autometrics|autometrics") 13 | (#eq? @wrapperdirect.name "autometrics")) 14 | 15 | ((import_statement 16 | (import_clause 17 | (named_imports 18 | (import_specifier 19 | name: (identifier) @real.name 20 | alias: (identifier) @wrapperdirect.name))) 21 | source: (string (string_fragment) @lib.name)) 22 | (#match? 
@lib.name "@autometrics\/autometrics|autometrics") 23 | (#eq? @real.name "autometrics")) 24 | 25 | 26 | 27 | ;; TODO: this doesn't work as captures aren't shared between patterns. 28 | ;; This means we can't use @wrapperdirect.name in an #eq expression in the (call_expression) pattern afterwards 29 | ;; A recursion algorithm that uses a templated query might be the solution 30 | ((import_statement 31 | (import_clause 32 | (named_imports 33 | (import_specifier 34 | . 35 | name: (identifier) @wrapper.name 36 | .))) 37 | source: (string (string_fragment) @lib.name)) 38 | (#match? @lib.name "@autometrics\/autometrics|autometrics") 39 | (#eq? @wrapper.name "autometrics")) 40 | 41 | ((import_statement 42 | (import_clause 43 | (named_imports 44 | (import_specifier 45 | name: (identifier) @real.name 46 | alias: (identifier) @wrapper.name))) 47 | source: (string (string_fragment) @lib.name)) 48 | (#match? @lib.name "@autometrics\/autometrics|autometrics") 49 | (#eq? @real.name "autometrics")) 50 | 51 | ((class_declaration 52 | decorator: (decorator (identifier) @decorator.name) 53 | name: (type_identifier) @type.name 54 | body: (class_body 55 | [(method_signature 56 | name: (property_identifier) @method.name) 57 | (method_definition 58 | name: (property_identifier) @method.name)])) 59 | (#eq? @decorator.name "Autometrics")) 60 | 61 | ((class_declaration 62 | decorator: (decorator (call_expression 63 | function: (identifier) @decorator.name)) 64 | name: (type_identifier) @type.name 65 | body: (class_body 66 | [(method_signature 67 | name: (property_identifier) @method.name) 68 | (method_definition 69 | name: (property_identifier) @method.name)])) 70 | (#eq? 
@decorator.name "Autometrics")) 71 | -------------------------------------------------------------------------------- /am_list/runtime/queries/typescript/imports_map.scm: -------------------------------------------------------------------------------- 1 | ;; This query extracts all the imports of the current source 2 | ;; NOTE: it is impossible to merge "looking for imports" with the 3 | ;; "looking for autometricized functions" queries for at least 2 reasons: 4 | ;; - The "call_expression" is not necessarily a sibling node to the imports, and it's not possible 5 | ;; to match a "call_expression" as an arbitrarily deep "cousin" of the import_clause node. 6 | ;; - There is no builtin `#prefix?` operator, which makes checking for namespaced imports 7 | ;; impossible to do in 1 query 8 | 9 | ((import_statement 10 | (import_clause 11 | (named_imports 12 | (import_specifier 13 | name: (identifier) @inst.ident))) 14 | source: (string (string_fragment) @inst.source))) 15 | 16 | (import_statement 17 | (import_clause 18 | (named_imports 19 | (import_specifier 20 | name: (identifier) @inst.realname 21 | alias: (identifier) @inst.ident))) 22 | source: (string (string_fragment) @inst.source)) 23 | 24 | (import_statement 25 | (import_clause 26 | (namespace_import (identifier) @inst.prefix)) 27 | source: (string (string_fragment) @inst.source)) 28 | -------------------------------------------------------------------------------- /am_list/runtime/queries/typescript/wrapper_call.scm.tpl: -------------------------------------------------------------------------------- 1 | ((call_expression 2 | function: (identifier) @wrapper.call 3 | arguments: (arguments 4 | . 5 | (object 6 | (pair 7 | key: (property_identifier) @func.prop 8 | value: (string (string_fragment) @func.name)) 9 | (pair 10 | key: (property_identifier) @mod.prop 11 | value: (string (string_fragment) @module.name))))) 12 | (#eq? @wrapper.call "{0}") 13 | (#eq? @func.prop "functionName") 14 | (#eq? 
@mod.prop "moduleName")) 15 | 16 | ((call_expression 17 | function: (identifier) @wrapper.call 18 | arguments: (arguments 19 | . 20 | (object 21 | (pair 22 | key: (property_identifier) @mod.prop 23 | value: (string (string_fragment) @module.name)) 24 | (pair 25 | key: (property_identifier) @func.prop 26 | value: (string (string_fragment) @func.name))))) 27 | (#eq? @wrapper.call "{0}") 28 | (#eq? @func.prop "functionName") 29 | (#eq? @mod.prop "moduleName")) 30 | -------------------------------------------------------------------------------- /am_list/runtime/queries/typescript/wrapper_direct_call.scm.tpl: -------------------------------------------------------------------------------- 1 | ((call_expression 2 | function: (identifier) @wrapper.call 3 | arguments: (arguments (identifier) @func.name)) 4 | (#eq? @wrapper.call "{0}")) 5 | 6 | ((call_expression 7 | function: (identifier) @wrapper.call 8 | arguments: (arguments (member_expression) @func.name)) 9 | (#eq? @wrapper.call "{0}")) 10 | 11 | ((call_expression 12 | function: (identifier) @wrapper.call 13 | arguments: (arguments 14 | (function 15 | name: (identifier) @func.name))) 16 | (#eq? @wrapper.call "{0}")) 17 | -------------------------------------------------------------------------------- /am_list/src/go.rs: -------------------------------------------------------------------------------- 1 | mod queries; 2 | 3 | use crate::{FunctionInfo, InstrumentFile, ListAmFunctions, Result}; 4 | use log::debug; 5 | use queries::{AllFunctionsQuery, AmQuery}; 6 | use rayon::prelude::*; 7 | use std::{ 8 | collections::HashSet, 9 | fs::read_to_string, 10 | path::{Path, PathBuf}, 11 | }; 12 | use walkdir::{DirEntry, WalkDir}; 13 | 14 | /// Implementation of the Go support for listing autometricized functions. 
15 | #[derive(Clone, Copy, Debug, Default)] 16 | pub struct Impl {} 17 | 18 | impl Impl { 19 | fn is_hidden(entry: &DirEntry) -> bool { 20 | entry 21 | .file_name() 22 | .to_str() 23 | .map(|s| s.starts_with('.')) 24 | .unwrap_or(false) 25 | } 26 | 27 | fn is_valid(entry: &DirEntry) -> bool { 28 | if Impl::is_hidden(entry) { 29 | return false; 30 | } 31 | entry.file_type().is_dir() 32 | || entry 33 | .file_name() 34 | .to_str() 35 | .map(|s| s.ends_with(".go")) 36 | .unwrap_or(false) 37 | } 38 | 39 | fn list_files( 40 | project_root: &Path, 41 | exclude_patterns: Option<&ignore::gitignore::Gitignore>, 42 | ) -> Vec { 43 | const PREALLOCATED_ELEMS: usize = 100; 44 | let walker = WalkDir::new(project_root).into_iter(); 45 | let mut project_files = Vec::with_capacity(PREALLOCATED_ELEMS); 46 | project_files.extend(walker.filter_entry(Self::is_valid).filter_map(|entry| { 47 | let entry = entry.ok()?; 48 | 49 | if let Some(pattern) = exclude_patterns { 50 | let ignore_match = 51 | pattern.matched_path_or_any_parents(entry.path(), entry.file_type().is_dir()); 52 | if matches!(ignore_match, ignore::Match::Ignore(_)) { 53 | debug!( 54 | "The exclusion pattern got a match on {}: {:?}", 55 | entry.path().display(), 56 | ignore_match 57 | ); 58 | return None; 59 | } 60 | } 61 | 62 | Some( 63 | entry 64 | .path() 65 | .to_str() 66 | .map(ToString::to_string) 67 | .unwrap_or_default(), 68 | ) 69 | })); 70 | 71 | project_files 72 | } 73 | } 74 | 75 | impl ListAmFunctions for Impl { 76 | fn list_autometrics_functions(&mut self, project_root: &Path) -> Result> { 77 | const PREALLOCATED_ELEMS: usize = 100; 78 | let mut list = HashSet::with_capacity(PREALLOCATED_ELEMS); 79 | 80 | let project_files = Self::list_files(project_root, None); 81 | let query = AmQuery::try_new()?; 82 | 83 | list.par_extend(project_files.par_iter().filter_map(move |path| { 84 | let source = read_to_string(path).ok()?; 85 | let file_name = PathBuf::from(path) 86 | .strip_prefix(project_root) 87 | 
.expect("path comes from a project_root WalkDir") 88 | .to_str() 89 | .expect("file_name is a valid path as it is part of `path`") 90 | .to_string(); 91 | let names = query 92 | .list_function_names(&file_name, &source) 93 | .unwrap_or_default(); 94 | Some(names) 95 | })); 96 | 97 | let mut result = Vec::with_capacity(PREALLOCATED_ELEMS); 98 | result.extend(list.into_iter().flatten()); 99 | Ok(result) 100 | } 101 | 102 | fn list_all_function_definitions(&mut self, project_root: &Path) -> Result> { 103 | const PREALLOCATED_ELEMS: usize = 100; 104 | let mut list = HashSet::with_capacity(PREALLOCATED_ELEMS); 105 | 106 | let project_files = Self::list_files(project_root, None); 107 | let query = AllFunctionsQuery::try_new()?; 108 | 109 | list.par_extend(project_files.par_iter().filter_map(move |path| { 110 | let source = read_to_string(path).ok()?; 111 | let file_name = PathBuf::from(path) 112 | .strip_prefix(project_root) 113 | .expect("path comes from a project_root WalkDir") 114 | .to_str() 115 | .expect("file_name is a valid path as it is part of `path`") 116 | .to_string(); 117 | let names = query 118 | .list_function_names(&file_name, &source) 119 | .unwrap_or_default(); 120 | Some(names) 121 | })); 122 | 123 | let mut result = Vec::with_capacity(PREALLOCATED_ELEMS); 124 | result.extend(list.into_iter().flatten()); 125 | Ok(result) 126 | } 127 | 128 | fn list_autometrics_functions_in_single_file( 129 | &mut self, 130 | source_code: &str, 131 | ) -> Result> { 132 | let query = AmQuery::try_new()?; 133 | query.list_function_names("", source_code) 134 | } 135 | 136 | fn list_all_function_definitions_in_single_file( 137 | &mut self, 138 | source_code: &str, 139 | ) -> Result> { 140 | let query = AllFunctionsQuery::try_new()?; 141 | query.list_function_names("", source_code) 142 | } 143 | } 144 | 145 | impl InstrumentFile for Impl { 146 | fn instrument_source_code(&mut self, source: &str) -> Result { 147 | let mut locations = 
self.list_all_functions_in_single_file(source)?; 148 | locations.sort_by_key(|info| { 149 | info.definition 150 | .as_ref() 151 | .map(|def| def.range.start.line) 152 | .unwrap_or_default() 153 | }); 154 | 155 | let has_am_directive = source 156 | .lines() 157 | .any(|line| line.starts_with("//go:generate autometrics")); 158 | 159 | let mut new_code = crop::Rope::from(source); 160 | // Keeping track of inserted lines to update the byte offset to insert code to, 161 | // only works if the locations list is sorted from top to bottom 162 | let mut inserted_lines = 0; 163 | 164 | if !has_am_directive { 165 | new_code.insert(0, "//go:generate autometrics --otel\n"); 166 | inserted_lines += 1; 167 | } 168 | 169 | for function_info in locations { 170 | if function_info.definition.is_none() || function_info.instrumentation.is_some() { 171 | continue; 172 | } 173 | 174 | let def_line = function_info.definition.as_ref().unwrap().range.start.line; 175 | let byte_offset = new_code.byte_of_line(inserted_lines + def_line); 176 | new_code.insert(byte_offset, "//autometrics:inst\n"); 177 | inserted_lines += 1; 178 | } 179 | 180 | Ok(new_code.to_string()) 181 | } 182 | 183 | fn instrument_project( 184 | &mut self, 185 | project_root: &Path, 186 | exclude_patterns: Option<&ignore::gitignore::Gitignore>, 187 | ) -> Result<()> { 188 | let sources_modules = Self::list_files(project_root, exclude_patterns); 189 | debug!("Found sources {sources_modules:?}"); 190 | 191 | for path in sources_modules { 192 | if std::fs::metadata(&path)?.is_dir() { 193 | continue; 194 | } 195 | debug!("Instrumenting {path}"); 196 | let old_source = read_to_string(&path)?; 197 | let new_source = self.instrument_source_code(&old_source)?; 198 | std::fs::write(path, new_source)?; 199 | } 200 | 201 | Ok(()) 202 | } 203 | } 204 | 205 | #[cfg(test)] 206 | mod tests; 207 | -------------------------------------------------------------------------------- /am_list/src/go/queries.rs: 
-------------------------------------------------------------------------------- 1 | use crate::{AmlError, FunctionInfo, Location, Result, FUNC_NAME_CAPTURE}; 2 | use log::error; 3 | use tree_sitter::{Parser, Query}; 4 | use tree_sitter_go::language; 5 | 6 | const PACK_NAME_CAPTURE: &str = "pack.name"; 7 | const TYPE_NAME_CAPTURE: &str = "type.name"; 8 | 9 | fn new_parser() -> Result { 10 | let mut parser = Parser::new(); 11 | parser.set_language(language())?; 12 | Ok(parser) 13 | } 14 | 15 | /// Query wrapper for "all autometrics functions in source" 16 | #[derive(Debug)] 17 | pub(super) struct AmQuery { 18 | query: Query, 19 | /// Index of the capture for a Type, in the case of methods. 20 | type_name_idx: u32, 21 | /// Index of the capture for a function name. 22 | func_name_idx: u32, 23 | /// Index of the capture for the package name. 24 | mod_name_idx: u32, 25 | } 26 | 27 | impl AmQuery { 28 | /// Failible constructor. 29 | /// 30 | /// The constructor only fails if the given tree-sitter query does not have the 31 | /// necessary named captures. 
32 | pub fn try_new() -> Result { 33 | let query = Query::new( 34 | language(), 35 | include_str!("../../runtime/queries/go/autometrics.scm"), 36 | )?; 37 | let type_name_idx = query 38 | .capture_index_for_name(TYPE_NAME_CAPTURE) 39 | .ok_or_else(|| AmlError::MissingNamedCapture(TYPE_NAME_CAPTURE.to_string()))?; 40 | let func_name_idx = query 41 | .capture_index_for_name(FUNC_NAME_CAPTURE) 42 | .ok_or_else(|| AmlError::MissingNamedCapture(FUNC_NAME_CAPTURE.to_string()))?; 43 | let mod_name_idx = query 44 | .capture_index_for_name(PACK_NAME_CAPTURE) 45 | .ok_or_else(|| AmlError::MissingNamedCapture(PACK_NAME_CAPTURE.to_string()))?; 46 | 47 | Ok(Self { 48 | query, 49 | type_name_idx, 50 | func_name_idx, 51 | mod_name_idx, 52 | }) 53 | } 54 | 55 | pub fn list_function_names(&self, file_name: &str, source: &str) -> Result> { 56 | let mut parser = new_parser()?; 57 | let parsed_source = parser.parse(source, None).ok_or(AmlError::Parsing)?; 58 | 59 | let mut cursor = tree_sitter::QueryCursor::new(); 60 | cursor 61 | .matches(&self.query, parsed_source.root_node(), source.as_bytes()) 62 | .filter_map(|capture| -> Option> { 63 | let module = capture 64 | .nodes_for_capture_index(self.mod_name_idx) 65 | .next() 66 | .map(|node| node.utf8_text(source.as_bytes()).map(ToString::to_string))?; 67 | let type_name = capture 68 | .nodes_for_capture_index(self.type_name_idx) 69 | .next() 70 | .map(|node| node.utf8_text(source.as_bytes()).map(ToString::to_string)) 71 | .transpose(); 72 | let fn_node = capture.nodes_for_capture_index(self.func_name_idx).next()?; 73 | let fn_name = fn_node 74 | .utf8_text(source.as_bytes()) 75 | .map(ToString::to_string); 76 | let start = fn_node.start_position(); 77 | let end = fn_node.end_position(); 78 | let instrumentation = Some(Location::from((file_name, start, end))); 79 | let definition = Some(Location::from((file_name, start, end))); 80 | 81 | match (module, type_name, fn_name) { 82 | (Ok(module), Ok(type_name), Ok(function)) => 
Some(Ok(FunctionInfo { 83 | id: ( 84 | module, 85 | format!( 86 | "{}{function}", 87 | if let Some(go_type) = type_name { 88 | format!("{go_type}.") 89 | } else { 90 | String::new() 91 | } 92 | ), 93 | ) 94 | .into(), 95 | instrumentation, 96 | definition, 97 | })), 98 | (Err(err_mod), _, _) => { 99 | error!("could not fetch the package name: {err_mod}"); 100 | Some(Err(AmlError::InvalidText)) 101 | } 102 | (_, Err(err_typ), _) => { 103 | error!("could not fetch the package name: {err_typ}"); 104 | Some(Err(AmlError::InvalidText)) 105 | } 106 | (_, _, Err(err_fn)) => { 107 | error!("could not fetch the package name: {err_fn}"); 108 | Some(Err(AmlError::InvalidText)) 109 | } 110 | } 111 | }) 112 | .collect::, _>>() 113 | } 114 | } 115 | 116 | /// Query wrapper for "all functions in source" 117 | #[derive(Debug)] 118 | pub(super) struct AllFunctionsQuery { 119 | query: Query, 120 | /// Index of the capture for a Type, in the case of methods. 121 | type_name_idx: u32, 122 | /// Index of the capture for a function name. 123 | func_name_idx: u32, 124 | /// Index of the capture for the package name. 125 | mod_name_idx: u32, 126 | } 127 | 128 | impl AllFunctionsQuery { 129 | /// Failible constructor. 130 | /// 131 | /// The constructor only fails if the given tree-sitter query does not have the 132 | /// necessary named captures. 
133 | pub fn try_new() -> Result { 134 | let query = Query::new( 135 | language(), 136 | include_str!("../../runtime/queries/go/all_functions.scm"), 137 | )?; 138 | let type_name_idx = query 139 | .capture_index_for_name(TYPE_NAME_CAPTURE) 140 | .ok_or_else(|| AmlError::MissingNamedCapture(TYPE_NAME_CAPTURE.to_string()))?; 141 | let func_name_idx = query 142 | .capture_index_for_name(FUNC_NAME_CAPTURE) 143 | .ok_or_else(|| AmlError::MissingNamedCapture(FUNC_NAME_CAPTURE.to_string()))?; 144 | let mod_name_idx = query 145 | .capture_index_for_name(PACK_NAME_CAPTURE) 146 | .ok_or_else(|| AmlError::MissingNamedCapture(PACK_NAME_CAPTURE.to_string()))?; 147 | 148 | Ok(Self { 149 | query, 150 | type_name_idx, 151 | func_name_idx, 152 | mod_name_idx, 153 | }) 154 | } 155 | 156 | pub fn list_function_names(&self, file_name: &str, source: &str) -> Result> { 157 | let mut parser = new_parser()?; 158 | let parsed_source = parser.parse(source, None).ok_or(AmlError::Parsing)?; 159 | 160 | let mut cursor = tree_sitter::QueryCursor::new(); 161 | cursor 162 | .matches(&self.query, parsed_source.root_node(), source.as_bytes()) 163 | .filter_map(|capture| -> Option> { 164 | let module = capture 165 | .nodes_for_capture_index(self.mod_name_idx) 166 | .next() 167 | .map(|node| node.utf8_text(source.as_bytes()).map(ToString::to_string))?; 168 | let type_name = capture 169 | .nodes_for_capture_index(self.type_name_idx) 170 | .next() 171 | .map(|node| node.utf8_text(source.as_bytes()).map(ToString::to_string)) 172 | .transpose(); 173 | let fn_node = capture.nodes_for_capture_index(self.func_name_idx).next()?; 174 | let fn_name = fn_node 175 | .utf8_text(source.as_bytes()) 176 | .map(ToString::to_string); 177 | let start = fn_node.start_position(); 178 | let end = fn_node.end_position(); 179 | let instrumentation = None; 180 | let definition = Some(Location::from((file_name, start, end))); 181 | 182 | match (module, type_name, fn_name) { 183 | (Ok(module), Ok(type_name), Ok(function)) => 
Some(Ok(FunctionInfo { 184 | id: ( 185 | module, 186 | format!( 187 | "{}{function}", 188 | if let Some(go_type) = type_name { 189 | format!("{go_type}.") 190 | } else { 191 | String::new() 192 | } 193 | ), 194 | ) 195 | .into(), 196 | instrumentation, 197 | definition, 198 | })), 199 | (Err(err_mod), _, _) => { 200 | error!("could not fetch the package name: {err_mod}"); 201 | Some(Err(AmlError::InvalidText)) 202 | } 203 | (_, Err(err_typ), _) => { 204 | error!("could not fetch the package name: {err_typ}"); 205 | Some(Err(AmlError::InvalidText)) 206 | } 207 | (_, _, Err(err_fn)) => { 208 | error!("could not fetch the package name: {err_fn}"); 209 | Some(Err(AmlError::InvalidText)) 210 | } 211 | } 212 | }) 213 | .collect::, _>>() 214 | } 215 | } 216 | -------------------------------------------------------------------------------- /am_list/src/go/tests.rs: -------------------------------------------------------------------------------- 1 | //! These tests are mostly for the queries, to ensure that querying only 2 | //! autometricized functions, or all functions, give the correct set of 3 | //! [`FunctionInfo`] entries. It is up to the [`Impl`] structure for each 4 | //! language to then merge the sets so that functions that get detected by both 5 | //! queries have their information merged. 
6 | 7 | use crate::{Location, Position, Range}; 8 | 9 | use super::*; 10 | use pretty_assertions::assert_eq; 11 | 12 | const FILE_NAME: &str = "source.go"; 13 | 14 | #[test] 15 | fn detect_simple() { 16 | let source = r#" 17 | package lambda 18 | 19 | //autometrics:inst 20 | func the_one() { 21 | return nil 22 | } 23 | "#; 24 | 25 | let query = AmQuery::try_new().unwrap(); 26 | let list = query.list_function_names(FILE_NAME, source).unwrap(); 27 | let all_query = AllFunctionsQuery::try_new().unwrap(); 28 | let all_list = all_query.list_function_names(FILE_NAME, source).unwrap(); 29 | 30 | let the_one_location = Location { 31 | file: FILE_NAME.to_string(), 32 | range: Range { 33 | start: Position { 34 | line: 4, 35 | column: 13, 36 | }, 37 | end: Position { 38 | line: 4, 39 | column: 20, 40 | }, 41 | }, 42 | }; 43 | 44 | let the_one_instrumented = FunctionInfo { 45 | id: ("lambda", "the_one").into(), 46 | instrumentation: Some(the_one_location.clone()), 47 | definition: Some(the_one_location.clone()), 48 | }; 49 | 50 | let the_one_all_functions = FunctionInfo { 51 | id: ("lambda", "the_one").into(), 52 | instrumentation: None, 53 | definition: Some(the_one_location), 54 | }; 55 | 56 | assert_eq!(list.len(), 1); 57 | assert_eq!(list[0], the_one_instrumented); 58 | 59 | assert_eq!(all_list.len(), 1); 60 | assert_eq!(all_list[0], the_one_all_functions); 61 | } 62 | 63 | #[test] 64 | fn detect_legacy() { 65 | let source = r#" 66 | package beta 67 | 68 | func not_the_one() { 69 | } 70 | 71 | //autometrics:doc 72 | func sandwiched_function() { 73 | return nil 74 | } 75 | 76 | func not_that_one_either() { 77 | } 78 | "#; 79 | 80 | let query = AmQuery::try_new().unwrap(); 81 | let list = query.list_function_names(FILE_NAME, source).unwrap(); 82 | let all_query = AllFunctionsQuery::try_new().unwrap(); 83 | let all_list = all_query.list_function_names(FILE_NAME, source).unwrap(); 84 | 85 | let not_the_one_location = Location { 86 | file: FILE_NAME.to_string(), 87 | range: 
Range { 88 | start: Position { 89 | line: 3, 90 | column: 13, 91 | }, 92 | end: Position { 93 | line: 3, 94 | column: 13 + "not_the_one".len(), 95 | }, 96 | }, 97 | }; 98 | 99 | let sandwiched_function_location = Location { 100 | file: FILE_NAME.to_string(), 101 | range: Range { 102 | start: Position { 103 | line: 7, 104 | column: 13, 105 | }, 106 | end: Position { 107 | line: 7, 108 | column: 13 + "sandwiched_function".len(), 109 | }, 110 | }, 111 | }; 112 | 113 | let not_that_one_either_location = Location { 114 | file: FILE_NAME.to_string(), 115 | range: Range { 116 | start: Position { 117 | line: 11, 118 | column: 13, 119 | }, 120 | end: Position { 121 | line: 11, 122 | column: 13 + "not_that_one_either".len(), 123 | }, 124 | }, 125 | }; 126 | 127 | let sandwiched_instrumented = FunctionInfo { 128 | id: ("beta", "sandwiched_function").into(), 129 | instrumentation: Some(sandwiched_function_location.clone()), 130 | definition: Some(sandwiched_function_location.clone()), 131 | }; 132 | let sandwiched_all = FunctionInfo { 133 | id: ("beta", "sandwiched_function").into(), 134 | instrumentation: None, 135 | definition: Some(sandwiched_function_location.clone()), 136 | }; 137 | let not_the_one = FunctionInfo { 138 | id: ("beta", "not_the_one").into(), 139 | instrumentation: None, 140 | definition: Some(not_the_one_location), 141 | }; 142 | let not_that_one = FunctionInfo { 143 | id: ("beta", "not_that_one_either").into(), 144 | instrumentation: None, 145 | definition: Some(not_that_one_either_location), 146 | }; 147 | 148 | assert_eq!(list.len(), 1); 149 | assert_eq!(list[0], sandwiched_instrumented); 150 | 151 | assert_eq!( 152 | all_list.len(), 153 | 3, 154 | "complete functions list should have 3 items, got {} instead: {all_list:?}", 155 | all_list.len() 156 | ); 157 | assert!(all_list.contains(&sandwiched_all)); 158 | assert!(all_list.contains(¬_the_one)); 159 | assert!(all_list.contains(¬_that_one)); 160 | } 161 | 162 | #[test] 163 | fn detect_method() { 164 | 
let source = r#" 165 | package lambda 166 | 167 | //autometrics:inst 168 | func (s Server) the_one() { 169 | return nil 170 | } 171 | "#; 172 | 173 | let query = AmQuery::try_new().unwrap(); 174 | let list = query.list_function_names(FILE_NAME, source).unwrap(); 175 | let all_query = AllFunctionsQuery::try_new().unwrap(); 176 | let all_list = all_query.list_function_names(FILE_NAME, source).unwrap(); 177 | 178 | let the_one_location = Location { 179 | file: FILE_NAME.to_string(), 180 | range: Range { 181 | start: Position { 182 | line: 4, 183 | column: 24, 184 | }, 185 | end: Position { 186 | line: 4, 187 | column: 31, 188 | }, 189 | }, 190 | }; 191 | 192 | let the_one_instrumented = FunctionInfo { 193 | id: ("lambda", "Server.the_one").into(), 194 | instrumentation: Some(the_one_location.clone()), 195 | definition: Some(the_one_location.clone()), 196 | }; 197 | 198 | let the_one_all_functions = FunctionInfo { 199 | definition: Some(the_one_location), 200 | id: ("lambda", "Server.the_one").into(), 201 | instrumentation: None, 202 | }; 203 | 204 | assert_eq!(list.len(), 1); 205 | assert_eq!(list[0], the_one_instrumented); 206 | 207 | assert_eq!(all_list.len(), 1); 208 | assert_eq!(all_list[0], the_one_all_functions); 209 | } 210 | 211 | #[test] 212 | fn detect_method_pointer_receiver() { 213 | let source = r#" 214 | package lambda 215 | 216 | //autometrics:inst 217 | func (h *Handler) the_one() { 218 | return nil 219 | } 220 | "#; 221 | 222 | let query = AmQuery::try_new().unwrap(); 223 | let list = query.list_function_names(FILE_NAME, source).unwrap(); 224 | let all_query = AllFunctionsQuery::try_new().unwrap(); 225 | let all_list = all_query.list_function_names(FILE_NAME, source).unwrap(); 226 | 227 | let the_one_location = Location { 228 | file: FILE_NAME.to_string(), 229 | range: Range { 230 | start: Position { 231 | line: 4, 232 | column: 26, 233 | }, 234 | end: Position { 235 | line: 4, 236 | column: 33, 237 | }, 238 | }, 239 | }; 240 | 241 | let 
the_one_instrumented = FunctionInfo { 242 | id: ("lambda", "Handler.the_one").into(), 243 | instrumentation: Some(the_one_location.clone()), 244 | definition: Some(the_one_location.clone()), 245 | }; 246 | 247 | let the_one_all_functions = FunctionInfo { 248 | id: ("lambda", "Handler.the_one").into(), 249 | instrumentation: None, 250 | definition: Some(the_one_location), 251 | }; 252 | 253 | assert_eq!(list.len(), 1); 254 | assert_eq!(list[0], the_one_instrumented); 255 | 256 | assert_eq!(all_list.len(), 1); 257 | assert_eq!(all_list[0], the_one_all_functions); 258 | } 259 | 260 | #[test] 261 | fn instrument_method() { 262 | let source = r#" 263 | package lambda 264 | 265 | func (s Server) the_one() { 266 | return nil 267 | } 268 | "#; 269 | 270 | let expected = r#"//go:generate autometrics --otel 271 | 272 | package lambda 273 | 274 | //autometrics:inst 275 | func (s Server) the_one() { 276 | return nil 277 | } 278 | "#; 279 | 280 | let mut implementation = Impl {}; 281 | let actual = implementation.instrument_source_code(source).unwrap(); 282 | assert_eq!(&actual, expected); 283 | } 284 | 285 | #[test] 286 | fn instrument_multiple() { 287 | let source = r#"package beta 288 | 289 | func not_the_one() { 290 | } 291 | 292 | //autometrics:doc 293 | func sandwiched_function() { 294 | return nil 295 | } 296 | 297 | func not_that_one_either() { 298 | } 299 | "#; 300 | 301 | let expected = r#"//go:generate autometrics --otel 302 | package beta 303 | 304 | //autometrics:inst 305 | func not_the_one() { 306 | } 307 | 308 | //autometrics:doc 309 | func sandwiched_function() { 310 | return nil 311 | } 312 | 313 | //autometrics:inst 314 | func not_that_one_either() { 315 | } 316 | "#; 317 | 318 | let mut implementation = Impl {}; 319 | let actual = implementation.instrument_source_code(source).unwrap(); 320 | assert_eq!(&actual, expected); 321 | } 322 | -------------------------------------------------------------------------------- /am_list/src/python.rs: 
-------------------------------------------------------------------------------- 1 | mod queries; 2 | 3 | use crate::{FunctionInfo, InstrumentFile, ListAmFunctions, Result}; 4 | use log::debug; 5 | use queries::{AllFunctionsQuery, AmImportQuery, AmQuery}; 6 | use rayon::prelude::*; 7 | use std::{ 8 | collections::HashSet, 9 | fs::read_to_string, 10 | path::{Path, PathBuf, MAIN_SEPARATOR}, 11 | }; 12 | use walkdir::{DirEntry, WalkDir}; 13 | 14 | /// Implementation of the Python support for listing autometricized functions. 15 | #[derive(Clone, Copy, Debug, Default)] 16 | pub struct Impl {} 17 | 18 | impl Impl { 19 | fn is_hidden(entry: &DirEntry) -> bool { 20 | entry 21 | .file_name() 22 | .to_str() 23 | .map(|s| s.starts_with('.')) 24 | .unwrap_or(false) 25 | } 26 | 27 | fn is_valid(entry: &DirEntry) -> bool { 28 | if Impl::is_hidden(entry) { 29 | return false; 30 | } 31 | entry.file_type().is_dir() 32 | || entry 33 | .path() 34 | .extension() 35 | .map_or(false, |ext| ext == "py" || ext == "py3") 36 | } 37 | 38 | fn list_files( 39 | project_root: &Path, 40 | exclude_patterns: Option<&ignore::gitignore::Gitignore>, 41 | ) -> Vec { 42 | const PREALLOCATED_ELEMS: usize = 100; 43 | let walker = WalkDir::new(project_root).into_iter(); 44 | let mut project_files = Vec::with_capacity(PREALLOCATED_ELEMS); 45 | project_files.extend(walker.filter_entry(Self::is_valid).filter_map(|entry| { 46 | let entry = entry.ok()?; 47 | 48 | if let Some(pattern) = exclude_patterns { 49 | let ignore_match = 50 | pattern.matched_path_or_any_parents(entry.path(), entry.file_type().is_dir()); 51 | if matches!(ignore_match, ignore::Match::Ignore(_)) { 52 | debug!( 53 | "The exclusion pattern got a match on {}: {:?}", 54 | entry.path().display(), 55 | ignore_match 56 | ); 57 | return None; 58 | } 59 | } 60 | 61 | Some( 62 | entry 63 | .path() 64 | .to_str() 65 | .map(ToString::to_string) 66 | .unwrap_or_default(), 67 | ) 68 | })); 69 | 70 | project_files 71 | } 72 | } 73 | 74 | impl 
ListAmFunctions for Impl { 75 | fn list_autometrics_functions(&mut self, project_root: &Path) -> Result> { 76 | const PREALLOCATED_ELEMS: usize = 100; 77 | let mut list = HashSet::with_capacity(PREALLOCATED_ELEMS); 78 | let root_name = project_root 79 | .file_name() 80 | .map(|s| s.to_str().unwrap_or_default()) 81 | .unwrap_or(""); 82 | let project_files = Self::list_files(project_root, None); 83 | 84 | list.par_extend(project_files.par_iter().filter_map(move |path| { 85 | let relative_module_name = Path::new(path) 86 | .strip_prefix(project_root) 87 | .ok()? 88 | .with_extension("") 89 | .to_str()? 90 | .replace(MAIN_SEPARATOR, "."); 91 | let module_name = format!("{}.{}", root_name, relative_module_name); 92 | let source = read_to_string(path).ok()?; 93 | let import_query = AmImportQuery::try_new().ok()?; 94 | let decorator_name = import_query.get_decorator_name(source.as_str()).ok()?; 95 | let query = AmQuery::try_new(decorator_name.as_str()).ok()?; 96 | let file_name = PathBuf::from(path) 97 | .strip_prefix(project_root) 98 | .expect("path comes from a project_root WalkDir") 99 | .to_str() 100 | .expect("file_name is a valid path as it is part of `path`") 101 | .to_string(); 102 | let names = query 103 | .list_function_names(&file_name, &source, module_name.as_str()) 104 | .unwrap_or_default(); 105 | Some(names) 106 | })); 107 | 108 | let mut result = Vec::with_capacity(PREALLOCATED_ELEMS); 109 | result.extend(list.into_iter().flatten()); 110 | Ok(result) 111 | } 112 | 113 | fn list_all_function_definitions(&mut self, project_root: &Path) -> Result> { 114 | const PREALLOCATED_ELEMS: usize = 100; 115 | let mut list = HashSet::with_capacity(PREALLOCATED_ELEMS); 116 | let root_name = project_root 117 | .file_name() 118 | .map(|s| s.to_str().unwrap_or_default()) 119 | .unwrap_or(""); 120 | 121 | let project_files = Self::list_files(project_root, None); 122 | 123 | list.par_extend(project_files.par_iter().filter_map(move |path| { 124 | let relative_module_name = 
Path::new(path) 125 | .strip_prefix(project_root) 126 | .ok()? 127 | .with_extension("") 128 | .to_str()? 129 | .replace(MAIN_SEPARATOR, "."); 130 | let module_name = format!("{}.{}", root_name, relative_module_name); 131 | let source = read_to_string(path).ok()?; 132 | let file_name = PathBuf::from(path) 133 | .strip_prefix(project_root) 134 | .expect("path comes from a project_root WalkDir") 135 | .to_str() 136 | .expect("file_name is a valid path as it is part of `path`") 137 | .to_string(); 138 | let query = AllFunctionsQuery::try_new().ok()?; 139 | let names = query 140 | .list_function_names(&file_name, &source, module_name.as_str()) 141 | .unwrap_or_default(); 142 | Some(names) 143 | })); 144 | 145 | let mut result = Vec::with_capacity(PREALLOCATED_ELEMS); 146 | result.extend(list.into_iter().flatten()); 147 | Ok(result) 148 | } 149 | 150 | fn list_autometrics_functions_in_single_file( 151 | &mut self, 152 | source_code: &str, 153 | ) -> Result> { 154 | let import_query = AmImportQuery::try_new()?; 155 | let decorator_name = import_query.get_decorator_name(source_code).ok(); 156 | if decorator_name.is_none() { 157 | return Ok(Vec::new()); 158 | } 159 | let query = AmQuery::try_new(decorator_name.as_ref().unwrap())?; 160 | query.list_function_names("", source_code, "") 161 | } 162 | 163 | fn list_all_function_definitions_in_single_file( 164 | &mut self, 165 | source_code: &str, 166 | ) -> Result> { 167 | let query = AllFunctionsQuery::try_new()?; 168 | query.list_function_names("", source_code, "") 169 | } 170 | } 171 | 172 | impl InstrumentFile for Impl { 173 | fn instrument_source_code(&mut self, source: &str) -> Result { 174 | const DEF_LEN: usize = "def ".len(); 175 | 176 | let mut locations = self.list_all_functions_in_single_file(source)?; 177 | locations.sort_by_key(|info| { 178 | info.definition 179 | .as_ref() 180 | .map(|def| def.range.start.line) 181 | .unwrap_or_default() 182 | }); 183 | 184 | let has_am_directive = source 185 | .lines() 186 | 
.any(|line| line.contains("from autometrics import autometrics")); 187 | 188 | let mut new_code = crop::Rope::from(source); 189 | // Keeping track of inserted lines to update the byte offset to insert code to, 190 | // only works if the locations list is sorted from top to bottom 191 | let mut inserted_lines = 0; 192 | 193 | if !has_am_directive { 194 | new_code.insert(0, "from autometrics import autometrics\n"); 195 | inserted_lines += 1; 196 | } 197 | 198 | for function_info in locations { 199 | if function_info.definition.is_none() || function_info.instrumentation.is_some() { 200 | continue; 201 | } 202 | 203 | let def_line = function_info.definition.as_ref().unwrap().range.start.line; 204 | let def_col = function_info 205 | .definition 206 | .unwrap() 207 | .range 208 | .start 209 | .column 210 | .saturating_sub(DEF_LEN); 211 | let byte_offset = new_code.byte_of_line(inserted_lines + def_line); 212 | new_code.insert( 213 | byte_offset, 214 | format!("{}@autometrics\n", " ".repeat(def_col)), 215 | ); 216 | inserted_lines += 1; 217 | } 218 | 219 | Ok(new_code.to_string()) 220 | } 221 | 222 | fn instrument_project( 223 | &mut self, 224 | project_root: &Path, 225 | exclude_patterns: Option<&ignore::gitignore::Gitignore>, 226 | ) -> Result<()> { 227 | let sources_modules = Self::list_files(project_root, exclude_patterns); 228 | 229 | for path in sources_modules { 230 | if std::fs::metadata(&path)?.is_dir() { 231 | continue; 232 | } 233 | debug!("Instrumenting {path}"); 234 | let old_source = read_to_string(&path)?; 235 | let new_source = self.instrument_source_code(&old_source)?; 236 | std::fs::write(path, new_source)?; 237 | } 238 | 239 | Ok(()) 240 | } 241 | } 242 | 243 | #[cfg(test)] 244 | mod tests; 245 | -------------------------------------------------------------------------------- /am_list/src/python/queries.rs: -------------------------------------------------------------------------------- 1 | use crate::{AmlError, FunctionInfo, Location, Result, 
FUNC_NAME_CAPTURE}; 2 | use tree_sitter::{Parser, Query}; 3 | use tree_sitter_python::language; 4 | 5 | const IMPORT_ALIAS_CAPTURE: &str = "import.alias"; 6 | 7 | fn new_parser() -> Result { 8 | let mut parser = Parser::new(); 9 | parser.set_language(language())?; 10 | Ok(parser) 11 | } 12 | 13 | fn get_node_qualname(node: &tree_sitter::Node, source: &str) -> Result { 14 | let mut parts = Vec::new(); 15 | let mut node = node.clone().parent().ok_or(AmlError::InvalidText)?; 16 | while let Some(parent) = node.parent() { 17 | match parent.kind() { 18 | "class_definition" | "function_definition" => { 19 | let name = parent 20 | .named_child(0) 21 | .ok_or(AmlError::InvalidText)? 22 | .utf8_text(source.as_bytes()) 23 | .map(ToString::to_string) 24 | .map_err(|_| AmlError::InvalidText)?; 25 | if parent.kind() == "class_definition" { 26 | parts.push(name); 27 | } else { 28 | parts.extend(vec!["".to_string(), name]); 29 | } 30 | } 31 | _ => {} 32 | } 33 | node = parent; 34 | } 35 | parts.reverse(); 36 | Ok(parts.join(".")) 37 | } 38 | 39 | /// Query wrapper for "all autometrics functions in source" 40 | #[derive(Debug)] 41 | pub(super) struct AmQuery { 42 | query: Query, 43 | /// Index of the capture for a function name. 44 | func_name_idx: u32, 45 | } 46 | 47 | impl AmQuery { 48 | /// Failible constructor. 49 | /// 50 | /// The constructor only fails if the given tree-sitter query does not have the 51 | /// necessary named captures. 
52 | pub fn try_new(decorator_name: &str) -> Result { 53 | let am_query_str = format!( 54 | include_str!("../../runtime/queries/python/autometrics.scm.tpl"), 55 | decorator_name 56 | ); 57 | let query = Query::new(language(), &am_query_str)?; 58 | let func_name_idx = query 59 | .capture_index_for_name(FUNC_NAME_CAPTURE) 60 | .ok_or_else(|| AmlError::MissingNamedCapture(FUNC_NAME_CAPTURE.to_string()))?; 61 | Ok(Self { 62 | query, 63 | func_name_idx, 64 | }) 65 | } 66 | 67 | pub fn list_function_names( 68 | &self, 69 | file_name: &str, 70 | source: &str, 71 | module_name: &str, 72 | ) -> Result> { 73 | let mut parser = new_parser()?; 74 | let parsed_source = parser.parse(source, None).ok_or(AmlError::Parsing)?; 75 | 76 | let mut cursor = tree_sitter::QueryCursor::new(); 77 | cursor 78 | .matches(&self.query, parsed_source.root_node(), source.as_bytes()) 79 | .filter_map(|m| { 80 | let node = m.nodes_for_capture_index(self.func_name_idx).next()?; 81 | let start = node.start_position(); 82 | let end = node.end_position(); 83 | let instrumentation = Some(Location::from((file_name, start, end))); 84 | let definition = Some(Location::from((file_name, start, end))); 85 | 86 | let func_name = node.utf8_text(source.as_bytes()).ok()?.to_string(); 87 | let qualname = get_node_qualname(&node, source).ok()?; 88 | let full_name = if qualname.is_empty() { 89 | func_name 90 | } else { 91 | format!("{}.{}", qualname, func_name) 92 | }; 93 | Some(Ok(FunctionInfo { 94 | id: (module_name, full_name).into(), 95 | instrumentation, 96 | definition, 97 | })) 98 | }) 99 | .collect::, _>>() 100 | } 101 | } 102 | 103 | /// Query wrapper for autometrics decorator imports in source 104 | #[derive(Debug)] 105 | pub(super) struct AmImportQuery { 106 | query: Query, 107 | /// Index of the capture for import alias 108 | import_alias_idx: u32, 109 | } 110 | 111 | impl AmImportQuery { 112 | /// Failible constructor. 
113 | /// 114 | /// The constructor only fails if the given tree-sitter query does not have the 115 | /// necessary named captures. 116 | pub fn try_new() -> Result { 117 | let query = Query::new( 118 | language(), 119 | include_str!("../../runtime/queries/python/import.scm"), 120 | )?; 121 | let import_alias_idx = query 122 | .capture_index_for_name(IMPORT_ALIAS_CAPTURE) 123 | .ok_or_else(|| AmlError::MissingNamedCapture(IMPORT_ALIAS_CAPTURE.to_string()))?; 124 | Ok(Self { 125 | query, 126 | import_alias_idx, 127 | }) 128 | } 129 | 130 | pub fn get_decorator_name(&self, source: &str) -> Result { 131 | let mut parser = new_parser()?; 132 | let parsed_source = parser.parse(source, None).ok_or(AmlError::Parsing)?; 133 | 134 | let mut cursor = tree_sitter::QueryCursor::new(); 135 | let matches = cursor 136 | .matches(&self.query, parsed_source.root_node(), source.as_bytes()) 137 | .collect::>(); 138 | if matches.len() != 1 { 139 | return Err(AmlError::InvalidText); 140 | } 141 | let alias = matches[0] 142 | .captures 143 | .iter() 144 | .find(|c| c.index == self.import_alias_idx) 145 | .map(|c| c.node.utf8_text(source.as_bytes()).map(ToString::to_string)); 146 | match alias { 147 | Some(Ok(alias)) => Ok(alias), 148 | None => Ok("autometrics".to_string()), 149 | _ => Err(AmlError::InvalidText), 150 | } 151 | } 152 | } 153 | 154 | /// Query wrapper for "all functions in source" 155 | #[derive(Debug)] 156 | pub(super) struct AllFunctionsQuery { 157 | query: Query, 158 | /// Index of the capture for a function name. 159 | func_name_idx: u32, 160 | } 161 | 162 | impl AllFunctionsQuery { 163 | /// Failible constructor. 164 | /// 165 | /// The constructor only fails if the given tree-sitter query does not have the 166 | /// necessary named captures. 
167 | pub fn try_new() -> Result { 168 | let query = Query::new( 169 | language(), 170 | include_str!("../../runtime/queries/python/all_functions.scm"), 171 | )?; 172 | let func_name_idx = query 173 | .capture_index_for_name(FUNC_NAME_CAPTURE) 174 | .ok_or_else(|| AmlError::MissingNamedCapture(FUNC_NAME_CAPTURE.to_string()))?; 175 | 176 | Ok(Self { 177 | query, 178 | func_name_idx, 179 | }) 180 | } 181 | 182 | pub fn list_function_names( 183 | &self, 184 | file_name: &str, 185 | source: &str, 186 | module_name: &str, 187 | ) -> Result> { 188 | let mut parser = new_parser()?; 189 | let parsed_source = parser.parse(source, None).ok_or(AmlError::Parsing)?; 190 | 191 | let mut cursor = tree_sitter::QueryCursor::new(); 192 | cursor 193 | .matches(&self.query, parsed_source.root_node(), source.as_bytes()) 194 | .filter_map(|capture| -> Option> { 195 | let node = capture 196 | .captures 197 | .iter() 198 | .find(|c| c.index == self.func_name_idx)? 199 | .node; 200 | let start = node.start_position(); 201 | let end = node.end_position(); 202 | let instrumentation = None; 203 | let definition = Some(Location::from((file_name, start, end))); 204 | let func_name = node.utf8_text(source.as_bytes()).ok()?.to_string(); 205 | let qualname = get_node_qualname(&node, source).ok()?; 206 | let full_name = if qualname.is_empty() { 207 | func_name 208 | } else { 209 | format!("{}.{}", qualname, func_name) 210 | }; 211 | Some(Ok(FunctionInfo { 212 | id: (module_name, full_name).into(), 213 | instrumentation, 214 | definition, 215 | })) 216 | }) 217 | .collect::, _>>() 218 | } 219 | } 220 | -------------------------------------------------------------------------------- /am_list/src/python/tests.rs: -------------------------------------------------------------------------------- 1 | //! These tests are mostly for the queries, to ensure that querying only 2 | //! autometricized functions, or all functions, give the correct set of 3 | //! [`FunctionInfo`] entries. 
#[test]
fn detect_simple() {
    let source = r#"
        from autometrics import autometrics

        @autometrics
        def the_one():
            return 'wake up, Neo'
        "#;

    let import_query = AmImportQuery::try_new().unwrap();
    let decorator = import_query.get_decorator_name(source).unwrap();
    let am_functions = AmQuery::try_new(decorator.as_str())
        .unwrap()
        .list_function_names(FILE_NAME, source, DUMMY_MODULE)
        .unwrap();
    let all_functions = AllFunctionsQuery::try_new()
        .unwrap()
        .list_function_names(FILE_NAME, source, DUMMY_MODULE)
        .unwrap();

    // "the_one" sits on (0-based) row 4, after the 8-space indent + "def ".
    let the_one_location = Location {
        file: FILE_NAME.to_string(),
        range: Range {
            start: Position {
                line: 4,
                column: 12,
            },
            end: Position {
                line: 4,
                column: 12 + "the_one".len(),
            },
        },
    };

    let the_one = FunctionInfo {
        id: ("dummy", "the_one").into(),
        instrumentation: None,
        definition: Some(the_one_location.clone()),
    };
    let the_one_instrumented = FunctionInfo {
        id: ("dummy", "the_one").into(),
        instrumentation: Some(the_one_location.clone()),
        definition: Some(the_one_location),
    };

    assert_eq!(am_functions.len(), 1);
    assert_eq!(am_functions[0], the_one_instrumented);
    assert_eq!(all_functions.len(), 1);
    assert_eq!(all_functions[0], the_one);
}
#[test]
fn detect_nested() {
    let source = r#"
        from autometrics import autometrics

        @autometrics
        def the_one():
            @autometrics
            def the_two():
                return 'wake up, Neo'
            return the_two()
        "#;

    let import_query = AmImportQuery::try_new().unwrap();
    let decorator = import_query.get_decorator_name(source).unwrap();
    let am_functions = AmQuery::try_new(decorator.as_str())
        .unwrap()
        .list_function_names(FILE_NAME, source, DUMMY_MODULE)
        .unwrap();
    let all_functions = AllFunctionsQuery::try_new()
        .unwrap()
        .list_function_names(FILE_NAME, source, DUMMY_MODULE)
        .unwrap();

    let the_one_location = Location {
        file: FILE_NAME.to_string(),
        range: Range {
            start: Position {
                line: 4,
                column: 12,
            },
            end: Position {
                line: 4,
                column: 12 + "the_one".len(),
            },
        },
    };
    let the_two_location = Location {
        file: FILE_NAME.to_string(),
        range: Range {
            start: Position {
                line: 6,
                column: 16,
            },
            end: Position {
                line: 6,
                column: 16 + "the_two".len(),
            },
        },
    };

    let the_one = FunctionInfo {
        id: ("dummy", "the_one").into(),
        instrumentation: None,
        definition: Some(the_one_location.clone()),
    };
    // Nested functions are qualified with a double dot ("outer..inner").
    let the_two = FunctionInfo {
        id: ("dummy", "the_one..the_two").into(),
        instrumentation: None,
        definition: Some(the_two_location.clone()),
    };
    let the_one_instrumented = FunctionInfo {
        id: ("dummy", "the_one").into(),
        instrumentation: Some(the_one_location.clone()),
        definition: Some(the_one_location),
    };
    let the_two_instrumented = FunctionInfo {
        id: ("dummy", "the_one..the_two").into(),
        instrumentation: Some(the_two_location.clone()),
        definition: Some(the_two_location),
    };

    assert_eq!(am_functions.len(), 2);
    assert!(am_functions.contains(&the_one_instrumented));
    assert!(am_functions.contains(&the_two_instrumented));
    assert_eq!(all_functions.len(), 2);
    assert!(all_functions.contains(&the_one));
    assert!(all_functions.contains(&the_two));
}
#[test]
fn instrument_multiple() {
    // The import is already present, so only the missing decorators should
    // be inserted; `the_two` must not be decorated twice.
    let source = r#"
        from autometrics import autometrics

        def the_one():
            return 'wake up, Neo'

        @autometrics
        def the_two():
            return 'wake up, Neo'

        def the_three():
            return 'wake up, Neo'
        "#;

    let expected = r#"
        from autometrics import autometrics

        @autometrics
        def the_one():
            return 'wake up, Neo'

        @autometrics
        def the_two():
            return 'wake up, Neo'

        @autometrics
        def the_three():
            return 'wake up, Neo'
        "#;

    let mut implementation = Impl {};
    let instrumented = implementation.instrument_source_code(source).unwrap();
    assert_eq!(&instrumented, expected);
}
11 | pub fn find_project_roots(repo: &Path) -> Result, AmlError> { 12 | let abs_repo = repo.canonicalize().map_err(|_| AmlError::InvalidPath)?; 13 | debug!("Looking for roots in {}", abs_repo.display()); 14 | let rust_roots = find_rust_roots(&abs_repo) 15 | .into_iter() 16 | .map(|project_root| (project_root, Language::Rust)); 17 | let ts_roots = find_typescript_roots(&abs_repo) 18 | .into_iter() 19 | .map(|project_root| (project_root, Language::Typescript)); 20 | let go_roots = find_go_roots(&abs_repo) 21 | .into_iter() 22 | .map(|project_root| (project_root, Language::Go)); 23 | let py_roots = find_py_roots(&abs_repo) 24 | .into_iter() 25 | .map(|project_root| (project_root, Language::Python)); 26 | 27 | Ok(rust_roots 28 | .chain(ts_roots) 29 | .chain(go_roots) 30 | .chain(py_roots) 31 | .collect()) 32 | } 33 | 34 | fn is_hidden(entry: &DirEntry) -> bool { 35 | entry.depth() != 0 36 | && entry 37 | .file_name() 38 | .to_str() 39 | .map(|s| s.starts_with('.')) 40 | .unwrap_or(false) 41 | } 42 | 43 | fn find_rust_roots(repo: &Path) -> Vec { 44 | fn is_in_target(entry: &DirEntry) -> bool { 45 | let mut depth = entry.depth(); 46 | let mut pointer = entry.path(); 47 | while depth > 0 { 48 | if pointer 49 | .file_name() 50 | .and_then(|s| s.to_str()) 51 | .map(|s| s == "target") 52 | .unwrap_or(false) 53 | && pointer.is_dir() 54 | { 55 | return true; 56 | } 57 | 58 | depth -= 1; 59 | pointer = match pointer.parent() { 60 | Some(new_pointer) => new_pointer, 61 | None => { 62 | return false; 63 | } 64 | }; 65 | } 66 | 67 | false 68 | } 69 | 70 | let walker = WalkDir::new(repo).into_iter(); 71 | walker 72 | .filter_entry(|e| !is_hidden(e) && !is_in_target(e)) 73 | .filter_map(|e| -> Option { 74 | match e { 75 | Ok(path) => { 76 | if path.file_type().is_file() && path.file_name().to_str() == Some("Cargo.toml") 77 | { 78 | path.path().parent().map(Path::to_path_buf) 79 | } else { 80 | None 81 | } 82 | } 83 | _ => None, 84 | } 85 | }) 86 | .collect() 87 | } 88 | 89 | fn 
find_typescript_roots(repo: &Path) -> Vec { 90 | fn is_in_node_modules(entry: &DirEntry) -> bool { 91 | let mut depth = entry.depth(); 92 | let mut pointer = entry.path(); 93 | while depth > 0 { 94 | if pointer 95 | .file_name() 96 | .and_then(|s| s.to_str()) 97 | .map(|s| s == "node_modules") 98 | .unwrap_or(false) 99 | && pointer.is_dir() 100 | { 101 | return true; 102 | } 103 | 104 | depth -= 1; 105 | pointer = match pointer.parent() { 106 | Some(new_pointer) => new_pointer, 107 | None => { 108 | return false; 109 | } 110 | }; 111 | } 112 | 113 | false 114 | } 115 | 116 | let walker = WalkDir::new(repo).into_iter(); 117 | walker 118 | .filter_entry(|e| !is_hidden(e) && !is_in_node_modules(e)) 119 | .filter_map(|e| -> Option { 120 | match e { 121 | Ok(path) => { 122 | if path.file_type().is_file() 123 | && path.file_name().to_str() == Some("package.json") 124 | { 125 | path.path().parent().map(Path::to_path_buf) 126 | } else { 127 | None 128 | } 129 | } 130 | _ => None, 131 | } 132 | }) 133 | .collect() 134 | } 135 | 136 | fn find_go_roots(repo: &Path) -> Vec { 137 | fn is_in_vendor(entry: &DirEntry) -> bool { 138 | let mut depth = entry.depth(); 139 | let mut pointer = entry.path(); 140 | while depth > 0 { 141 | if pointer 142 | .file_name() 143 | .and_then(|s| s.to_str()) 144 | .map(|s| s == "vendor") 145 | .unwrap_or(false) 146 | && pointer.is_dir() 147 | { 148 | return true; 149 | } 150 | 151 | depth -= 1; 152 | pointer = match pointer.parent() { 153 | Some(new_pointer) => new_pointer, 154 | None => { 155 | return false; 156 | } 157 | }; 158 | } 159 | 160 | false 161 | } 162 | 163 | let walker = WalkDir::new(repo).into_iter(); 164 | walker 165 | .filter_entry(|e| !is_hidden(e) && !is_in_vendor(e)) 166 | .filter_map(|e| -> Option { 167 | match e { 168 | Ok(path) => { 169 | if path.file_type().is_file() && path.file_name().to_str() == Some("go.mod") { 170 | path.path().parent().map(Path::to_path_buf) 171 | } else { 172 | None 173 | } 174 | } 175 | _ => None, 
176 | } 177 | }) 178 | .collect() 179 | } 180 | 181 | fn find_py_roots(repo: &Path) -> HashSet { 182 | let walker = WalkDir::new(repo).into_iter(); 183 | walker 184 | .filter_entry(|e| !is_hidden(e)) 185 | .filter_map(|e| -> Option { 186 | match e { 187 | Ok(path) => { 188 | if path.file_type().is_file() { 189 | match path.file_name().to_str() { 190 | Some("setup.py") 191 | | Some("requirements.txt") 192 | | Some("pyproject.toml") => path.path().parent().map(Path::to_path_buf), 193 | _ => None, 194 | } 195 | } else { 196 | None 197 | } 198 | } 199 | _ => None, 200 | } 201 | }) 202 | .collect() 203 | } 204 | -------------------------------------------------------------------------------- /am_list/src/rust.rs: -------------------------------------------------------------------------------- 1 | mod queries; 2 | 3 | use self::queries::{AllFunctionsQuery, AmQuery}; 4 | use crate::{FunctionInfo, InstrumentFile, ListAmFunctions, Result}; 5 | use log::debug; 6 | use rayon::prelude::*; 7 | use std::{ 8 | collections::{HashSet, VecDeque}, 9 | fs::read_to_string, 10 | path::{Path, PathBuf}, 11 | }; 12 | use walkdir::{DirEntry, WalkDir}; 13 | 14 | #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 15 | struct AmStruct { 16 | module: String, 17 | strc: String, 18 | } 19 | 20 | /// Implementation of the Rust support for listing autometricized functions. 21 | #[derive(Clone, Copy, Debug, Default)] 22 | pub struct Impl {} 23 | 24 | impl Impl { 25 | fn is_hidden(entry: &DirEntry) -> bool { 26 | entry 27 | .file_name() 28 | .to_str() 29 | .map(|s| { 30 | s.starts_with('.') || 31 | // We only ignore folders/files named "target" if they are at 32 | // the root of the project, for the unlikely case where there 33 | // is a "target" module deeper in the project. 
34 | (entry.depth() == 1 && s == "target") 35 | }) 36 | .unwrap_or(false) 37 | } 38 | 39 | fn is_valid(entry: &DirEntry) -> bool { 40 | if Impl::is_hidden(entry) { 41 | return false; 42 | } 43 | entry.file_type().is_dir() 44 | || entry 45 | .file_name() 46 | .to_str() 47 | .map(|s| s.ends_with(".rs")) 48 | .unwrap_or(false) 49 | } 50 | 51 | fn fully_qualified_module_name(entry: &DirEntry) -> String { 52 | let mut current_depth = entry.depth(); 53 | let mut mod_name_elements = VecDeque::with_capacity(8); 54 | let mut path = entry.path(); 55 | 56 | // NOTE(magic) 57 | // This "0" magic constant bears the assumption "am_list" is called 58 | // from the root of a crate _or workspace_. 59 | // 60 | // HACK: Using the name of the directory all the time for module will 61 | // only work in workspaces if the sub-crate is always imported as the 62 | // name of its folder. 63 | while current_depth > 0 { 64 | if path.is_dir() { 65 | if let Some(component) = path.file_name() { 66 | let component = component.to_string_lossy(); 67 | if component != "src" { 68 | mod_name_elements.push_front(component.replace('-', "_")); 69 | } 70 | } 71 | } else if path.is_file() { 72 | if let Some(stem) = path 73 | .file_name() 74 | .and_then(|os_str| os_str.to_str()) 75 | .and_then(|file_name| file_name.strip_suffix(".rs")) 76 | { 77 | if stem != "mod" { 78 | mod_name_elements.push_front(stem.to_string()); 79 | } 80 | } 81 | } 82 | 83 | if path.parent().is_some() { 84 | path = path.parent().unwrap(); 85 | current_depth -= 1; 86 | } else { 87 | break; 88 | } 89 | } 90 | 91 | itertools::intersperse(mod_name_elements, "::".to_string()).collect() 92 | } 93 | 94 | fn list_files_and_modules( 95 | project_root: &Path, 96 | exclude_patterns: Option<&ignore::gitignore::Gitignore>, 97 | ) -> Vec<(String, String)> { 98 | const PREALLOCATED_ELEMS: usize = 100; 99 | 100 | let walker = WalkDir::new(project_root).into_iter(); 101 | let mut source_mod_pairs = Vec::with_capacity(PREALLOCATED_ELEMS); 102 | 
source_mod_pairs.extend(walker.filter_entry(Self::is_valid).filter_map(|entry| { 103 | let entry = entry.ok()?; 104 | 105 | if let Some(pattern) = exclude_patterns { 106 | let ignore_match = 107 | pattern.matched_path_or_any_parents(entry.path(), entry.file_type().is_dir()); 108 | if matches!(ignore_match, ignore::Match::Ignore(_)) { 109 | debug!( 110 | "The exclusion pattern got a match on {}: {:?}", 111 | entry.path().display(), 112 | ignore_match 113 | ); 114 | return None; 115 | } 116 | } 117 | 118 | let module = Self::fully_qualified_module_name(&entry); 119 | Some(( 120 | entry 121 | .path() 122 | .to_str() 123 | .map(ToString::to_string) 124 | .unwrap_or_default(), 125 | module, 126 | )) 127 | })); 128 | 129 | source_mod_pairs 130 | } 131 | } 132 | 133 | impl ListAmFunctions for Impl { 134 | fn list_autometrics_functions(&mut self, project_root: &Path) -> Result> { 135 | const PREALLOCATED_ELEMS: usize = 100; 136 | let mut list = HashSet::with_capacity(PREALLOCATED_ELEMS); 137 | let query = AmQuery::try_new()?; 138 | let source_mod_pairs = Self::list_files_and_modules(project_root, None); 139 | 140 | list.par_extend( 141 | source_mod_pairs 142 | .par_iter() 143 | .filter_map(move |(path, module)| { 144 | let source = read_to_string(path).ok()?; 145 | let file_name = PathBuf::from(path) 146 | .strip_prefix(project_root) 147 | .expect("path comes from a project_root WalkDir") 148 | .to_str() 149 | .expect("file_name is a valid path as it is part of `path`") 150 | .to_string(); 151 | let am_functions = query 152 | .list_function_names(&file_name, module.clone(), &source) 153 | .unwrap_or_default(); 154 | Some(am_functions) 155 | }), 156 | ); 157 | 158 | let mut result = Vec::with_capacity(PREALLOCATED_ELEMS); 159 | result.extend(list.into_iter().flatten()); 160 | Ok(result) 161 | } 162 | 163 | fn list_all_function_definitions(&mut self, project_root: &Path) -> Result> { 164 | const PREALLOCATED_ELEMS: usize = 400; 165 | let mut list = 
HashSet::with_capacity(PREALLOCATED_ELEMS); 166 | let source_mod_pairs = Self::list_files_and_modules(project_root, None); 167 | let query = AllFunctionsQuery::try_new()?; 168 | 169 | list.par_extend( 170 | source_mod_pairs 171 | .par_iter() 172 | .filter_map(move |(path, module)| { 173 | let source = read_to_string(path).ok()?; 174 | let file_name = PathBuf::from(path) 175 | .strip_prefix(project_root) 176 | .expect("path comes from a project_root WalkDir") 177 | .to_str() 178 | .expect("file_name is a valid path as it is part of `path`") 179 | .to_string(); 180 | let am_functions = query 181 | .list_function_names(&file_name, module.clone(), &source) 182 | .unwrap_or_default(); 183 | Some(am_functions) 184 | }), 185 | ); 186 | 187 | let mut result = Vec::with_capacity(PREALLOCATED_ELEMS); 188 | result.extend(list.into_iter().flatten()); 189 | Ok(result) 190 | } 191 | 192 | fn list_autometrics_functions_in_single_file( 193 | &mut self, 194 | source_code: &str, 195 | ) -> Result> { 196 | let query = AmQuery::try_new()?; 197 | query.list_function_names("", String::new(), source_code) 198 | } 199 | 200 | fn list_all_function_definitions_in_single_file( 201 | &mut self, 202 | source_code: &str, 203 | ) -> Result> { 204 | let query = AllFunctionsQuery::try_new()?; 205 | query.list_function_names("", String::new(), source_code) 206 | } 207 | } 208 | 209 | impl InstrumentFile for Impl { 210 | fn instrument_source_code(&mut self, source: &str) -> Result { 211 | let mut locations = self.list_all_functions_in_single_file(source)?; 212 | locations.sort_by_key(|info| { 213 | info.definition 214 | .as_ref() 215 | .map(|def| def.range.start.line) 216 | .unwrap_or_default() 217 | }); 218 | 219 | let mut new_code = crop::Rope::from(source); 220 | // Keeping track of inserted lines to update the byte offset to insert code to, 221 | // only works if the locations list is sorted from top to bottom 222 | let mut inserted_lines = 0; 223 | 224 | for function_info in locations { 225 | 
if function_info.definition.is_none() || function_info.instrumentation.is_some() { 226 | continue; 227 | } 228 | 229 | let def_line = function_info.definition.as_ref().unwrap().range.start.line; 230 | let byte_offset = new_code.byte_of_line(inserted_lines + def_line); 231 | new_code.insert(byte_offset, "#[autometrics::autometrics]\n"); 232 | inserted_lines += 1; 233 | } 234 | 235 | Ok(new_code.to_string()) 236 | } 237 | 238 | fn instrument_project( 239 | &mut self, 240 | project_root: &Path, 241 | exclude_patterns: Option<&ignore::gitignore::Gitignore>, 242 | ) -> Result<()> { 243 | let sources_modules = Self::list_files_and_modules(project_root, exclude_patterns); 244 | 245 | for (path, _module) in sources_modules { 246 | if std::fs::metadata(&path)?.is_dir() { 247 | continue; 248 | } 249 | debug!("Instrumenting {path}"); 250 | let old_source = read_to_string(&path)?; 251 | let new_source = self.instrument_source_code(&old_source)?; 252 | std::fs::write(path, new_source)?; 253 | } 254 | 255 | Ok(()) 256 | } 257 | } 258 | 259 | #[cfg(test)] 260 | mod tests; 261 | -------------------------------------------------------------------------------- /am_list/src/typescript/imports.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | path::{Path, PathBuf}, 4 | }; 5 | 6 | /// Relative source of an import 7 | #[derive(Clone, Debug, Default, PartialEq, Eq)] 8 | pub struct Source(String); 9 | 10 | impl> From for Source { 11 | fn from(value: T) -> Self { 12 | Self(value.into()) 13 | } 14 | } 15 | 16 | impl ToString for Source { 17 | fn to_string(&self) -> String { 18 | self.0.clone() 19 | } 20 | } 21 | 22 | impl Source { 23 | pub fn into_canonical(self, import_statement_location: Option<&Path>) -> CanonicalSource { 24 | if import_statement_location.map_or(true, |path| path.to_string_lossy().is_empty()) { 25 | // This base case is reached when we called `import_statement_location.parent()` too 26 | // 
many times, which means the import is a sibling of the import_statement_location given in the beginning. 27 | return CanonicalSource::from(format!("sibling://{}", self.0)); 28 | } 29 | 30 | let import_location = import_statement_location.unwrap(); 31 | 32 | let relative_path = PathBuf::from(self.0); 33 | if let Ok(sibling) = relative_path.strip_prefix("..") { 34 | return Source::from(sibling.to_string_lossy()) 35 | .into_canonical(import_location.parent()); 36 | } 37 | 38 | if let Ok(sub_dir) = relative_path.strip_prefix(".") { 39 | let mut combined_path = import_location.to_path_buf(); 40 | combined_path.push(sub_dir); 41 | CanonicalSource::from(combined_path.as_os_str().to_string_lossy()) 42 | } else { 43 | CanonicalSource::from(format!("ext://{}", relative_path.display())) 44 | } 45 | } 46 | } 47 | 48 | /// Canonical source of an import 49 | /// 50 | /// The import will begin with `ext://` if the import is detected to come from 51 | /// outside the current project. 52 | /// 53 | /// The import will begin with `sibling://` if the import is detected to come 54 | /// from a sibling folder in the same repository as the current project. 55 | #[derive(Clone, Debug, Default, PartialEq, Eq)] 56 | pub struct CanonicalSource(String); 57 | 58 | impl> From for CanonicalSource { 59 | fn from(value: T) -> Self { 60 | Self(value.into()) 61 | } 62 | } 63 | 64 | impl ToString for CanonicalSource { 65 | fn to_string(&self) -> String { 66 | self.0.clone() 67 | } 68 | } 69 | 70 | /// New type for Identifiers to create type safe interfaces. 71 | #[derive(Clone, Debug, Default, Hash, PartialEq, Eq)] 72 | pub struct Identifier(String); 73 | 74 | impl> From for Identifier { 75 | fn from(value: T) -> Self { 76 | Self(value.into()) 77 | } 78 | } 79 | 80 | impl ToString for Identifier { 81 | fn to_string(&self) -> String { 82 | self.0.clone() 83 | } 84 | } 85 | 86 | /// Structure containing the map of imports valid in a given source file. 
87 | #[derive(Clone, Debug, Default)] 88 | pub struct ImportsMap { 89 | namespaced_imports: HashMap, 90 | /// Maps: 91 | /// - (real_name) to (real_name, source), and 92 | /// - (aliased_name) to (aliased_name, source) 93 | named_imports: HashMap, 94 | } 95 | 96 | impl ImportsMap { 97 | pub fn find_namespace(&self, namespace: &Identifier) -> Option { 98 | self.namespaced_imports.get(namespace).cloned() 99 | } 100 | 101 | pub fn find_identifier(&self, ident: &Identifier) -> Option<(Identifier, CanonicalSource)> { 102 | self.named_imports.get(ident).cloned() 103 | } 104 | 105 | pub fn add_namespace( 106 | &mut self, 107 | namespace: Identifier, 108 | source: CanonicalSource, 109 | ) -> Option { 110 | self.namespaced_imports.insert(namespace, source) 111 | } 112 | 113 | pub fn add_named_import( 114 | &mut self, 115 | import: Identifier, 116 | source: CanonicalSource, 117 | ) -> Option<(Identifier, CanonicalSource)> { 118 | self.named_imports.insert(import.clone(), (import, source)) 119 | } 120 | 121 | pub fn add_aliased_import( 122 | &mut self, 123 | alias: Identifier, 124 | name_in_source: Identifier, 125 | source: CanonicalSource, 126 | ) -> Option<(Identifier, CanonicalSource)> { 127 | self.named_imports.insert(alias, (name_in_source, source)) 128 | } 129 | 130 | /// Return the original name and the source of the given identifier. 
131 | pub fn resolve_ident(&self, ident: Identifier) -> Option<(Identifier, CanonicalSource)> { 132 | let ident_str = ident.to_string(); 133 | 134 | if let Some((namespace, sub_ident)) = ident_str.split_once('.') { 135 | self.find_namespace(&Identifier::from(namespace)) 136 | .map(|canon| (Identifier::from(sub_ident), canon)) 137 | } else { 138 | self.find_identifier(&ident) 139 | } 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /assets/am-explorer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/autometrics-dev/am/6b9b57f21692f7312fff9a57e26410a8534238a4/assets/am-explorer.png -------------------------------------------------------------------------------- /assets/explorer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/autometrics-dev/am/6b9b57f21692f7312fff9a57e26410a8534238a4/assets/explorer.png -------------------------------------------------------------------------------- /autometrics-am/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "autometrics-am" 3 | version.workspace = true 4 | edition.workspace = true 5 | authors.workspace = true 6 | documentation.workspace = true 7 | repository.workspace = true 8 | homepage.workspace = true 9 | license.workspace = true 10 | 11 | [dependencies] 12 | anyhow = { workspace = true } 13 | humantime = { workspace = true } 14 | humantime-serde = "1.1.1" 15 | serde = { workspace = true } 16 | url = { workspace = true } 17 | -------------------------------------------------------------------------------- /autometrics-am/src/config.rs: -------------------------------------------------------------------------------- 1 | use crate::parser::endpoint_parser; 2 | use serde::de::Error; 3 | use serde::{Deserialize, Deserializer, Serialize}; 4 | use 
std::sync::atomic::{AtomicUsize, Ordering}; 5 | use std::time::Duration; 6 | use url::Url; 7 | 8 | /// This struct represents the am.toml configuration. Most properties in here 9 | /// are optional so that the user only specifies the ones that they want in that 10 | /// file. 11 | #[derive(Serialize, Deserialize, Default)] 12 | #[serde(rename_all = "kebab-case")] 13 | pub struct AmConfig { 14 | /// The endpoints that will be scraped by the Prometheus server. 15 | #[serde(rename = "endpoint")] 16 | pub endpoints: Option>, 17 | 18 | /// Startup the pushgateway. 19 | pub pushgateway_enabled: Option, 20 | 21 | /// The default scrape interval for all Prometheus endpoints. 22 | #[serde(default, with = "humantime_serde::option")] 23 | pub prometheus_scrape_interval: Option, 24 | } 25 | 26 | #[derive(Serialize, Deserialize, Debug, Clone)] 27 | #[serde(rename_all = "kebab-case")] 28 | pub struct Endpoint { 29 | /// The URL of the endpoint that will be scraped by the Prometheus server. 30 | /// Can use shorthand notation for the URL, e.g. `:3000`. 31 | #[serde(deserialize_with = "parse_maybe_shorthand")] 32 | pub url: Url, 33 | 34 | /// The job name as it appears in Prometheus. This value will be added to 35 | /// the scraped metrics as a label. 36 | pub job_name: Option, 37 | 38 | pub honor_labels: Option, 39 | 40 | /// The scrape interval for this endpoint. 41 | #[serde(default, with = "humantime_serde::option")] 42 | pub prometheus_scrape_interval: Option, 43 | } 44 | 45 | fn parse_maybe_shorthand<'de, D: Deserializer<'de>>(input: D) -> Result { 46 | let input_str: String = Deserialize::deserialize(input)?; 47 | endpoint_parser(&input_str).map_err(Error::custom) 48 | } 49 | 50 | /// If the user specified an endpoint using args, then use those. 51 | /// Otherwise, use the endpoint configured in the config file. And 52 | /// fallback to an empty list if neither are configured. 
53 | pub fn endpoints_from_first_input(args: Vec, config: Option>) -> Vec { 54 | static COUNTER: AtomicUsize = AtomicUsize::new(0); 55 | 56 | if !args.is_empty() { 57 | args.into_iter() 58 | .map(|url| { 59 | let num = COUNTER.fetch_add(1, Ordering::SeqCst); 60 | Endpoint { 61 | url, 62 | job_name: Some(format!("am_{num}")), 63 | honor_labels: Some(false), 64 | prometheus_scrape_interval: None, 65 | } 66 | }) 67 | .collect() 68 | } else if let Some(endpoints) = config { 69 | endpoints 70 | .into_iter() 71 | .map(|endpoint| { 72 | let job_name = endpoint.job_name.unwrap_or_else(|| { 73 | format!("am_{num}", num = COUNTER.fetch_add(1, Ordering::SeqCst)) 74 | }); 75 | 76 | Endpoint { 77 | url: endpoint.url, 78 | job_name: Some(job_name), 79 | honor_labels: endpoint.honor_labels, 80 | prometheus_scrape_interval: endpoint.prometheus_scrape_interval, 81 | } 82 | }) 83 | .collect() 84 | } else { 85 | Vec::new() 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /autometrics-am/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod config; 2 | pub mod parser; 3 | pub mod prometheus; 4 | -------------------------------------------------------------------------------- /autometrics-am/src/parser.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, Context, Result}; 2 | use url::Url; 3 | 4 | /// Parses the input string into a Url. This uses a custom parser to allow for 5 | /// some more flexible input. 6 | /// 7 | /// Parsing adheres to the following rules: 8 | /// - The protocol should only allow for http and https, where http is the 9 | /// default. 10 | /// - The port should follow the default for the protocol, 80 for http and 443 11 | /// for https. 12 | /// - The path should default to /metrics if the path is empty. It should not be 13 | /// appended if a path is already there. 
14 | pub fn endpoint_parser(input: &str) -> Result { 15 | let mut input = input.to_owned(); 16 | 17 | if input.starts_with(':') { 18 | // Prepend http://localhost if the input starts with a colon. 19 | input = format!("http://localhost{}", input); 20 | } 21 | 22 | // Prepend http:// if the input does not contain ://. This is a rather naive 23 | // check, but it should suffice for our purposes. 24 | if !input.contains("://") { 25 | input = format!("http://{}", input); 26 | } 27 | 28 | let mut url = 29 | Url::parse(&input).with_context(|| format!("Unable to parse endpoint {}", input))?; 30 | 31 | // Note that this should never be Err(_) since we're always adding http:// 32 | // in front of the input and thus making sure it is not a "cannot-be-a-base" 33 | // URL. 34 | if url.path() == "" || url.path() == "/" { 35 | url.set_path("/metrics"); 36 | } 37 | 38 | if url.scheme() != "http" && url.scheme() != "https" { 39 | bail!("unsupported protocol {}", url.scheme()); 40 | } 41 | 42 | Ok(url) 43 | } 44 | -------------------------------------------------------------------------------- /autometrics-am/src/prometheus.rs: -------------------------------------------------------------------------------- 1 | use serde::Serialize; 2 | use std::time::Duration; 3 | 4 | #[derive(Debug, Serialize)] 5 | pub struct Config { 6 | pub global: GlobalConfig, 7 | pub scrape_configs: Vec, 8 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 9 | pub rule_files: Vec, 10 | } 11 | 12 | #[derive(Debug, Serialize)] 13 | pub struct GlobalConfig { 14 | #[serde(with = "humantime_serde")] 15 | pub scrape_interval: Duration, 16 | pub evaluation_interval: String, 17 | } 18 | 19 | #[derive(Debug, Serialize)] 20 | pub struct ScrapeConfig { 21 | pub job_name: String, 22 | pub static_configs: Vec, 23 | pub metrics_path: Option, 24 | pub scheme: Option, 25 | pub honor_labels: Option, 26 | 27 | #[serde( 28 | default, 29 | with = "humantime_serde::option", 30 | skip_serializing_if = "Option::is_none" 31 
| )] 32 | pub scrape_interval: Option, 33 | } 34 | 35 | #[derive(Debug, Serialize)] 36 | pub struct StaticScrapeConfig { 37 | pub targets: Vec, 38 | } 39 | 40 | #[derive(Debug, Serialize)] 41 | #[serde(rename_all = "lowercase")] 42 | pub enum Scheme { 43 | Http, 44 | Https, 45 | } 46 | -------------------------------------------------------------------------------- /docs/container.md: -------------------------------------------------------------------------------- 1 | # AM container 2 | 3 | The `am` binary is primarily designed to be a tool to be run locally, during 4 | development. We do however provide a container that can be used for specific 5 | [use cases](#use-cases), but it does come with [limitations](#limitations) when 6 | running it in a container. 7 | 8 | Note: the examples use Docker, but they should also work with Podman. 9 | 10 | ## Getting started 11 | 12 | We publish our container images on [Docker Hub](https://hub.docker.com/r/autometrics/am). 13 | You can pull the `latest` image by running the following: 14 | 15 | ``` 16 | docker pull autometrics/am:latest 17 | ``` 18 | 19 | Then you can invoke any of `am`'s commands after that: 20 | 21 | ``` 22 | docker run -it --rm autometrics/am:latest --version 23 | ``` 24 | 25 |
26 | `am proxy` 27 | 28 | If you want to run `am proxy` in a container then we recommend that you use 29 | [Docker Hub](https://hub.docker.com/r/autometrics/am-proxy). This container 30 | comes with an entrypoint already set to the proxy command as well as an 31 | environment variable that allows `am` to listen on all addresses. 32 |
33 | 34 | ### Versions 35 | 36 | For production environments we recommend that you use a specific version of the 37 | container. This will ensure that any breaking changes won't affect your setup. 38 | For every version of `am`, we publish a specific container (see [Docker Hub](https://hub.docker.com/r/autometrics/am/tags)). 39 | 40 | ### Local configuration 41 | 42 | If you want to use `am start` locally, then you will need to ensure that the 43 | container is able to reach your application and that your browser is able to 44 | reach the container. This can be done by using the `--network=host` option with Docker 45 | or Podman: 46 | 47 | ``` 48 | docker run -it --rm --network=host autometrics/am:latest start :3000 49 | ``` 50 | 51 | This configuration will configure Prometheus within the container to monitor 52 | your application that is running outside of Docker on port `3000`. You can 53 | access Explorer by visiting `http://localhost:6789` in your browser. 54 | 55 |
56 | Advanced, non host network setup 57 | 58 | Alternatively, the following will not use the host network and instead will 59 | forward a port of the host to the container (Note that this won't allow 60 | Prometheus to reach your application running on the host): 61 | 62 | ``` 63 | docker run -it --rm -e LISTEN_ADDRESS=0.0.0.0:6789 -P autometrics/am:latest start example.com:3000 64 | ``` 65 | 66 | The extra argument ensures that the host is able to access `am` within the 67 | container. 68 |
69 | 70 | ### Docker Desktop 71 | 72 | If you are using Docker Desktop for Mac or Windows then you won't need to use 73 | the host network, while still being able to communicate with your application 74 | running on the host. This can be done by using the following endpoint: 75 | `host.docker.internal` and adding the port to it: 76 | 77 | ``` 78 | docker run -it --rm -e LISTEN_ADDRESS=0.0.0.0:6789 -P autometrics/am:latest start host.docker.internal:3000 79 | ``` 80 | 81 | ## Use cases 82 | 83 | ### Running it as a proxy 84 | 85 | `am` comes with a command called `proxy`. This will allow you to forward traffic 86 | to a Prometheus instance. This command was specifically intended to be used to 87 | be run in an environment such as Kubernetes. 88 | 89 | ### Being able to easily remove am 90 | 91 | If you want to quickly try out `am` then you can easily run it using Docker or 92 | Podman. Because nothing is installed on your machine and only images and 93 | containers are downloaded, it is easy to remove it again. 94 | 95 | Be aware of the [limitations](#limitations) of running `am` in a container. 96 | 97 | ## Limitations 98 | 99 | ### Data persistence 100 | 101 | If you are running `am start` in a container then it will download the 102 | Prometheus and other components to the file system of the container. This means 103 | that if you remove the container then you will need to download the files again. 104 | The same applies to data produced by Prometheus. 105 | 106 | ### Networking 107 | 108 | In some situations it might become difficult, confusing, or even impossible to 109 | configure `am` to reach your application that you want to monitor. This is 110 | because the container will be isolated in its own network (unless configured 111 | differently) and it might also be caused by the container running within a VM.
112 | -------------------------------------------------------------------------------- /files/explorer/graph.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 9 | 10 | 11 | Autometrics Explorer - Prometheus 12 | 13 | 14 | 15 |
16 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /files/explorer/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 9 | 10 | 11 | 16 | Autometrics Explorer 17 | 18 | 19 | 20 |
21 | 22 | 25 | 30 | 31 | 32 | --------------------------------------------------------------------------------