├── .github ├── FUNDING.yml ├── dependabot.yml └── workflows │ ├── nightly.yml │ └── stable.yml ├── .gitignore ├── Dockerfile ├── Dockerfile.test-runner ├── LICENSE ├── README.md ├── SCCACHE.md ├── check_stable.py ├── justfile ├── renovate.json5 ├── test.sh └── test ├── dieselsqlitecrate ├── Cargo.toml └── src │ └── main.rs ├── hypertlscrate ├── Cargo.toml └── src │ └── main.rs ├── pkgconf ├── Cargo.toml └── src │ └── main.rs ├── plaincrate ├── Cargo.toml └── src │ └── main.rs ├── serdecrate ├── Cargo.toml └── src │ └── main.rs └── zlibcrate ├── Cargo.toml ├── data.txt └── src └── main.rs /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [clux] 4 | #patreon: # Replace with a single Patreon username 5 | #open_collective: # Replace with a single Open Collective username 6 | #ko_fi: # Replace with a single Ko-fi username 7 | #tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | #community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | #liberapay: # Replace with a single Liberapay username 10 | #issuehunt: # Replace with a single IssueHunt username 11 | #otechie: # Replace with a single Otechie username 12 | #custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 13 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | -------------------------------------------------------------------------------- /.github/workflows/nightly.yml: -------------------------------------------------------------------------------- 1 | name: nightly 2 | on: 3 | schedule: 4 | - cron: '0 10 * * *' # everyday at 10am 
5 | push: 6 | branches: 7 | - 'main' 8 | pull_request: 9 | branches: 10 | - 'main' 11 | 12 | concurrency: 13 | group: ${{ github.ref }}-nightly 14 | cancel-in-progress: true 15 | 16 | env: 17 | REGISTRY_IMAGE: clux/muslrust 18 | 19 | jobs: 20 | build: 21 | name: 'Nightly Build' 22 | runs-on: 'ubuntu-latest' 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | platform: [linux/amd64, linux/arm64] 27 | include: 28 | - platform: linux/amd64 29 | arch: amd64 30 | target_dir: x86_64-unknown-linux-musl 31 | - platform: linux/arm64 32 | arch: arm64 33 | target_dir: aarch64-unknown-linux-musl 34 | steps: 35 | - uses: actions/checkout@v4 36 | - uses: extractions/setup-just@v3 37 | 38 | - name: Login to DockerHub 39 | uses: docker/login-action@v3 40 | if: ${{ github.repository_owner == 'clux' }} 41 | with: 42 | username: clux 43 | password: ${{ secrets.DOCKERHUB_TOKEN }} 44 | 45 | - name: Prepare 46 | run: | 47 | platform=${{ matrix.platform }} 48 | echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV 49 | 50 | - name: Docker meta 51 | id: meta 52 | uses: docker/metadata-action@v5 53 | with: 54 | images: ${{ env.REGISTRY_IMAGE }} 55 | 56 | - name: Set up QEMU 57 | uses: docker/setup-qemu-action@v3 58 | 59 | - name: Set up Docker Buildx 60 | uses: docker/setup-buildx-action@v3 61 | 62 | - name: Build nightly image 63 | id: build 64 | uses: docker/build-push-action@v6 65 | with: 66 | context: . 67 | platforms: ${{ matrix.platform }} 68 | labels: ${{ steps.meta.outputs.labels }} 69 | push: false 70 | load: true 71 | tags: rustmusl-temp 72 | build-args: | 73 | CHANNEL=nightly 74 | 75 | - name: Run tests 76 | shell: bash 77 | run: | 78 | docker buildx build --platform ${{ matrix.platform }} --output type=docker -t test-runner - < Dockerfile.test-runner 79 | TARGET_DIR=${{ matrix.target_dir }} PLATFORM=${{ matrix.platform }} just test-ci 80 | 81 | # The date/channel/version are expected to be the same on both architectures and are needed for the merge step. 
82 | # We store them here since it makes the merge step a bit easier - it doesn't need to figure out which of the 83 | # architectures it can run (to extract the rust version). The problem is that it appears we can't run images 84 | # that were built by docker buildx (the build-push-action step) locally. They get pushed to dockerhub but are 85 | # only identifiable by their digest and it appears docker does not let us select an image that way. 86 | # Not the most elegant, but it works. 87 | - name: Store tag info 88 | shell: bash 89 | run: | 90 | mkdir -p /tmp/tags 91 | RUST_DATE="$(date +"%Y-%m-%d")" 92 | RUST_CHANNEL=nightly 93 | # NB: final component needs '+' so two-digit patch versions (e.g. 1.82.10) are not truncated 94 | RUST_VER="$(docker run --platform ${{ matrix.platform }} rustmusl-temp rustc --version | grep -oE "[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+")" 95 | 96 | echo $RUST_DATE > /tmp/tags/rust-date 97 | echo $RUST_CHANNEL > /tmp/tags/rust-channel 98 | echo $RUST_VER > /tmp/tags/rust-ver 99 | 100 | - name: Tag and push 101 | shell: bash 102 | if: ${{ github.repository_owner == 'clux' }} 103 | run: | 104 | RUST_DATE=$(cat /tmp/tags/rust-date) 105 | RUST_CHANNEL=$(cat /tmp/tags/rust-channel) 106 | RUST_VER=$(cat /tmp/tags/rust-ver) 107 | 108 | TAG_NAME="${{ matrix.arch }}-${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE}" 109 | 110 | docker tag rustmusl-temp ${{ env.REGISTRY_IMAGE }}:$TAG_NAME 111 | docker push ${{ env.REGISTRY_IMAGE }}:$TAG_NAME 112 | 113 | - name: Upload tags 114 | uses: actions/upload-artifact@v4 115 | with: 116 | name: tags-${{matrix.arch}} 117 | path: /tmp/tags 118 | if-no-files-found: error 119 | retention-days: 1 120 | overwrite: true 121 | 122 | merge: 123 | runs-on: ubuntu-latest 124 | if: github.repository_owner == 'clux' 125 | needs: 126 | - build 127 | steps: 128 | 129 | - name: Download tags 130 | uses: actions/download-artifact@v4 131 | with: 132 | path: /tmp/tags 133 | pattern: tags-* 134 | merge-multiple: true 135 | 136 | - name: Set up Docker Buildx 137 | uses: docker/setup-buildx-action@v3 138 | 139 | - name: Docker meta 
139 | id: meta 140 | uses: docker/metadata-action@v5 141 | with: 142 | images: ${{ env.REGISTRY_IMAGE }} 143 | 144 | - name: Login to Docker Hub 145 | uses: docker/login-action@v3 146 | with: 147 | username: clux 148 | password: ${{ secrets.DOCKERHUB_TOKEN }} 149 | 150 | - name: Create manifest list and push multi-platform images 151 | run: | 152 | RUST_DATE=$(cat /tmp/tags/rust-date) 153 | RUST_CHANNEL=$(cat /tmp/tags/rust-channel) 154 | RUST_VER=$(cat /tmp/tags/rust-ver) 155 | 156 | for tag in latest ${RUST_CHANNEL} ${RUST_CHANNEL}-${RUST_DATE} ${RUST_VER}-${RUST_CHANNEL} ${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE}; do 157 | docker buildx imagetools create -t ${{ env.REGISTRY_IMAGE }}:$tag \ 158 | ${{ env.REGISTRY_IMAGE }}:amd64-${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE} \ 159 | ${{ env.REGISTRY_IMAGE }}:arm64-${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE} 160 | done 161 | 162 | - name: Inspect image 163 | run: | 164 | docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:latest 165 | -------------------------------------------------------------------------------- /.github/workflows/stable.yml: -------------------------------------------------------------------------------- 1 | name: stable 2 | on: 3 | schedule: 4 | - cron: '0 12 * * *' # everyday at noon 5 | push: 6 | branches: 7 | - 'main' 8 | pull_request: 9 | branches: 10 | - 'main' 11 | 12 | concurrency: 13 | group: ${{ github.ref }}-stable 14 | cancel-in-progress: true 15 | 16 | env: 17 | REGISTRY_IMAGE: clux/muslrust 18 | 19 | jobs: 20 | check-stable: 21 | name: 'Check if workflow should continue' 22 | outputs: 23 | CONTINUE_BUILD: ${{ steps.check-stable-tag.outputs.CONTINUE_BUILD }} 24 | runs-on: 'ubuntu-latest' 25 | steps: 26 | - uses: actions/checkout@v4 27 | - name: 'Check if we need a new stable' 28 | id: check-stable-tag 29 | shell: bash 30 | run: | 31 | pip3 install --user toml 32 | if python3 check_stable.py; then 33 | echo 'Stable tag missing; running all build steps' 34 | echo 'CONTINUE_BUILD=YES' >> 
"${GITHUB_OUTPUT}" 35 | else 36 | echo 'Stable tag found; skipping all build steps' 37 | fi 38 | 39 | build: 40 | name: 'Stable Build' 41 | needs: [check-stable] 42 | if: ${{ needs.check-stable.outputs.CONTINUE_BUILD == 'YES' }} 43 | runs-on: 'ubuntu-latest' 44 | strategy: 45 | fail-fast: false 46 | matrix: 47 | platform: [linux/amd64, linux/arm64] 48 | include: 49 | - platform: linux/amd64 50 | arch: amd64 51 | target_dir: x86_64-unknown-linux-musl 52 | - platform: linux/arm64 53 | arch: arm64 54 | target_dir: aarch64-unknown-linux-musl 55 | steps: 56 | - uses: actions/checkout@v4 57 | - uses: extractions/setup-just@v3 58 | 59 | - name: Login to DockerHub 60 | uses: docker/login-action@v3 61 | if: ${{ github.repository_owner == 'clux' }} 62 | with: 63 | username: clux 64 | password: ${{ secrets.DOCKERHUB_TOKEN }} 65 | 66 | - name: Docker meta 67 | id: meta 68 | uses: docker/metadata-action@v5 69 | with: 70 | images: ${{ env.REGISTRY_IMAGE }} 71 | 72 | - name: Set up QEMU 73 | uses: docker/setup-qemu-action@v3 74 | 75 | - name: Set up Docker Buildx 76 | uses: docker/setup-buildx-action@v3 77 | 78 | - name: Build stable image 79 | id: build 80 | uses: docker/build-push-action@v6 81 | with: 82 | context: . 83 | platforms: ${{ matrix.platform }} 84 | labels: ${{ steps.meta.outputs.labels }} 85 | push: false 86 | load: true 87 | tags: rustmusl-temp 88 | build-args: | 89 | CHANNEL=stable 90 | 91 | - name: Run tests 92 | shell: bash 93 | run: | 94 | docker buildx build --platform ${{ matrix.platform }} --output type=docker -t test-runner - < Dockerfile.test-runner 95 | TARGET_DIR=${{ matrix.target_dir }} PLATFORM=${{ matrix.platform }} just test-ci 96 | 97 | # The date/channel/version are expected to be the same on both architectures and are needed for the merge step. 98 | # We store them here since it makes the merge step a bit easier - it doesn't need to figure out which of the 99 | # architectures it can run (to extract the rust version). 
The problem is that it appears we can't run images 100 | # that were built by docker buildx (the build-push-action step) locally. They get pushed to dockerhub but are 101 | # only identifiable by their digest and it appears docker does not let us select an image that way. 102 | # Not the most elegant, but it works. 103 | - name: Store tag info 104 | shell: bash 105 | run: | 106 | mkdir -p /tmp/tags 107 | RUST_DATE="$(date +"%Y-%m-%d")" 108 | RUST_CHANNEL=stable 109 | # NB: final component needs '+' so two-digit patch versions (e.g. 1.82.10) are not truncated 110 | RUST_VER="$(docker run --platform ${{ matrix.platform }} rustmusl-temp rustc --version | grep -oE "[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+")" 111 | 112 | echo $RUST_DATE > /tmp/tags/rust-date 113 | echo $RUST_CHANNEL > /tmp/tags/rust-channel 114 | echo $RUST_VER > /tmp/tags/rust-ver 115 | 116 | - name: Tag and push 117 | if: ${{ github.repository_owner == 'clux' }} 118 | shell: bash 119 | run: | 120 | RUST_DATE=$(cat /tmp/tags/rust-date) 121 | RUST_CHANNEL=$(cat /tmp/tags/rust-channel) 122 | RUST_VER=$(cat /tmp/tags/rust-ver) 123 | 124 | TAG_NAME="${{ matrix.arch }}-${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE}" 125 | 126 | docker tag rustmusl-temp ${{ env.REGISTRY_IMAGE }}:$TAG_NAME 127 | docker push ${{ env.REGISTRY_IMAGE }}:$TAG_NAME 128 | 129 | # TODO: want to do this, but need digest, which might not be trivial to get outside build-push-action 130 | # - name: Attest docker.io 131 | # if: ${{ github.repository_owner == 'clux' }} 132 | # uses: actions/attest-build-provenance@v2.3.0 133 | # with: 134 | # subject-name: docker.io/${{ env.REGISTRY_IMAGE }} 135 | # subject-digest: ${{ steps.push_stable.outputs.digest }} 136 | # push-to-registry: true 137 | 138 | - name: Upload tags 139 | uses: actions/upload-artifact@v4 140 | with: 141 | name: tags-${{matrix.arch}} 142 | path: /tmp/tags 143 | if-no-files-found: error 144 | retention-days: 1 145 | overwrite: true 146 | 147 | merge: 148 | name: 'Stable merge' 149 | runs-on: ubuntu-latest 150 | if: github.repository_owner == 'clux' 151 | needs: 152 | - build 153 
| steps: 153 | - name: Download tags 154 | uses: actions/download-artifact@v4 155 | with: 156 | path: /tmp/tags 157 | pattern: tags-* 158 | merge-multiple: true 159 | 160 | - name: Set up Docker Buildx 161 | uses: docker/setup-buildx-action@v3 162 | 163 | - name: Docker meta 164 | id: meta 165 | uses: docker/metadata-action@v5 166 | with: 167 | images: ${{ env.REGISTRY_IMAGE }} 168 | 169 | - name: Login to Docker Hub 170 | uses: docker/login-action@v3 171 | with: 172 | username: clux 173 | password: ${{ secrets.DOCKERHUB_TOKEN }} 174 | 175 | - name: Create manifest list and push multi-platform images 176 | shell: bash 177 | run: | 178 | RUST_DATE=$(cat /tmp/tags/rust-date) 179 | RUST_CHANNEL=$(cat /tmp/tags/rust-channel) 180 | RUST_VER=$(cat /tmp/tags/rust-ver) 181 | 182 | # The two already published image tags to associate additional tags to: 183 | AMD64="${{ env.REGISTRY_IMAGE }}:amd64-${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE}" 184 | ARM64="${{ env.REGISTRY_IMAGE }}:arm64-${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE}" 185 | 186 | EXTRA_TAGS=( 187 | "${RUST_CHANNEL}" 188 | "${RUST_CHANNEL}-${RUST_DATE}" 189 | "${RUST_VER}-${RUST_CHANNEL}" 190 | "${RUST_VER}-${RUST_CHANNEL}-${RUST_DATE}" 191 | ) 192 | 193 | # Assign each tag to the two source image tags: 194 | for TAG in "${EXTRA_TAGS[@]}"; do 195 | docker buildx imagetools create --tag "${{ env.REGISTRY_IMAGE }}:${TAG}" "${AMD64}" "${ARM64}" 196 | done 197 | 198 | - name: Inspect image 199 | run: | 200 | docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:latest 201 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .build 2 | 3 | test/*crate/Cargo.lock 4 | test/*crate/target 5 | test/zlibcrate/data.tar.gz 6 | test/zlibcrate/output 7 | test/pkgconf/Cargo.lock 8 | test/pkgconf/target 9 | venv 10 | -------------------------------------------------------------------------------- 
/Dockerfile: -------------------------------------------------------------------------------- 1 | # syntax=docker/dockerfile:1 2 | ARG BASE_IMAGE=ubuntu:noble 3 | 4 | # Mapping ARM64 / AMD64 naming conventions to equivalent `uname -a` output (build target specific): 5 | FROM ${BASE_IMAGE} AS base-amd64 6 | ENV DOCKER_TARGET_ARCH=x86_64 7 | FROM ${BASE_IMAGE} AS base-arm64 8 | ENV DOCKER_TARGET_ARCH=aarch64 9 | 10 | FROM base-${TARGETARCH} AS base 11 | SHELL ["/bin/bash", "-eux", "-o", "pipefail", "-c"] 12 | # Required packages: 13 | # - musl-dev, musl-tools - the musl toolchain 14 | # - curl, g++, make, pkgconf, cmake - for fetching and building third party libs 15 | # - ca-certificates - peer verification of downloads 16 | # - git - cargo builds in user projects 17 | # - file - needed by rustup.sh install 18 | # - automake autoconf libtool - support crates building C deps as part cargo build 19 | # NB: does not include cmake atm 20 | RUN < `make` instructions. We are unlikely to include other C libraries herein unless they are very popular. 
168 | 169 | ### Extra Rustup components 170 | 171 | You can install extra components distributed via Rustup like normal: 172 | 173 | ```sh 174 | rustup component add clippy 175 | ``` 176 | 177 | ### Binaries distributed via Cargo 178 | 179 | If you need to install a binary crate such as [ripgrep](https://github.com/BurntSushi/ripgrep) on a CI build image, you need to build it against the GNU toolchain (see [#37](https://github.com/clux/muslrust/issues/37#issuecomment-357314202)): 180 | 181 | ```sh 182 | CARGO_BUILD_TARGET=x86_64-unknown-linux-gnu cargo install ripgrep 183 | ``` 184 | 185 | ## Alternatives 186 | 187 | - `rustup target add x86_64-unknown-linux-musl` works locally when not needing [C libraries](#c-libraries) 188 | - [official rust image](https://hub.docker.com/_/rust) can `target add` and easily cross-build when not needing [C libraries](#c-libraries) 189 | - [cross](https://github.com/japaric/cross) can cross-build different embedded targets 190 | -------------------------------------------------------------------------------- /SCCACHE.md: -------------------------------------------------------------------------------- 1 | ## muslrust + sccache 2 | 3 | The `muslrust` image includes `sccache`, so you can use it easily to try to improve build times. 4 | 5 | To use it, set `RUSTC_WRAPPER` to `path/to/sccache`, and set some environment variables to configure it. 6 | 7 | * `SCCACHE_DIR` is the directory that sccache will use to cache build artifacts 8 | * `SCCACHE_CACHE_SIZE` indicates the maximum size of the cache. `SCCACHE` will evict items when the limit is exceeded. 9 | * `SCCACHE_ERROR_LOG` is a path to a text file, which you can inspect if there are errors. 10 | * `CARGO_INCREMENTAL` should be set to `0` whenever using `sccache`. (modern versions of `sccache` may set this to 0 themselves, I'm not sure tbh.) 11 | 12 | `sccache --show-stats` can be used to print stats for cache hits, misses etc. 
There is also a command to zero the stats, 13 | but it is usually unnecessary to do so in the context of this image, because `sccache` does not persist the stats to disk, 14 | and the process terminates when your build completes. 15 | 16 | Here's an example `docker run` command: 17 | 18 | ``` 19 | if [ -z $MOUNT_ROOT ]; then 20 | MOUNT_ROOT="$HOME/.muslrust" 21 | fi 22 | 23 | POST_BUILD_CMD=chown -R $(id -u) ./target /root/.cargo/registry /root/sccache 24 | 25 | docker run -v $PWD:/volume \ 26 | -v "$MOUNT_ROOT/cargo/registry":/root/.cargo/registry \ 27 | -v "$MOUNT_ROOT/sccache":/root/sccache \ 28 | --env CARGO_INCREMENTAL=0 \ 29 | --env RUSTC_WRAPPER=/usr/local/bin/sccache \ 30 | --env SCCACHE_DIR=/root/sccache \ 31 | --env SCCACHE_CACHE_SIZE="${SCCACHE_CACHE_SIZE:-5G}" \ 32 | --env SCCACHE_ERROR_LOG=/tmp/sccache.log \ 33 | --rm -t clux/muslrust:stable sh -c "AR=ar cargo build --release --locked && /usr/local/bin/sccache --show-stats && ${POST_BUILD_CMD}" 34 | ``` 35 | 36 | When you run this, you should see a report from sccache that looks something like the following: 37 | 38 | ``` 39 | Finished `release` profile [optimized + debuginfo] target(s) in 2m 27s 40 | Compile requests 542 41 | Compile requests executed 488 42 | Cache hits 484 43 | Cache hits (C/C++) 36 44 | Cache hits (Rust) 448 45 | Cache misses 1 46 | Cache misses (Rust) 1 47 | Cache timeouts 0 48 | Cache read errors 0 49 | Forced recaches 0 50 | Cache write errors 0 51 | Compilation failures 3 52 | Cache errors 0 53 | Non-cacheable compilations 0 54 | Non-cacheable calls 52 55 | Non-compilation calls 2 56 | Unsupported compiler calls 0 57 | Average cache write 0.007 s 58 | Average compiler 1.568 s 59 | Average cache read hit 0.002 s 60 | Failed distributed compilations 0 61 | 62 | Non-cacheable reasons: 63 | unknown source language 24 64 | crate-type 22 65 | - 5 66 | -E 1 67 | 68 | Cache location Local disk: "/root/sccache" 69 | Use direct/preprocessor mode? 
yes 70 | Version (client) 0.8.0 71 | Cache size 494 MiB 72 | Max cache size 10 GiB 73 | ``` 74 | 75 | The above with a warm `sccache` cache, but a clean `target` directory. How many cache hits you get depends on many factors. 76 | There are [a number of things you can do](https://github.com/mozilla/sccache?tab=readme-ov-file#known-caveats) to change your usage, or your rust code, to get more cache hits. 77 | 78 | ### Mounting and caching directories 79 | 80 | In the above example, we're mounting `/root/.cargo/registry` and `/root/sccache` to the host machine, because these are directories we want to cache across invocations. 81 | 82 | You could use docker named volumes instead of actually mounting them to the host filesystem if you like, but if you plan to use this in github actions, 83 | it's better to actually mount them, and then cache those directories in github actions. 84 | 85 | Note that if you are running this locally, neither of these directories is going to grow without bound, because `cargo` has a gc internally for registry stuff, and `sccache` evicts cached files 86 | in an LRU fashion when the cache exceeds `SCCACHE_CACHE_SIZE`. So storing this in a home directory is a reasonably safe default. 87 | 88 | Caching this correctly in github actions is pretty simple. 89 | 90 | For "normal" rust builds (invoking cargo from gha directly, not using something like muslrust image or sccache), it's highly recommendable to use 91 | something like the [`rust-cache` action](https://github.com/Swatinem/rust-cache) and not re-invent the wheel, beause that is going to do things like, try to cache all builds of dependencies intelligently, 92 | check the toolchain and make that part of the cache key, etc. etc. 93 | 94 | When using `muslrust` with `sccache`, `sccache` is essentially going to do all that work. The `SCCACHE_DIR` is safe to share across OS's, architectures, toolchains, etc, because all of that data goes 95 | into the hash keys computed by `sccache`. 
96 | The `.cargo/registry` is also not dependent on your toolchain or OS or anything like that. Also `rust-cache` will attempt to figure out your `cargo` and `rustc` versions by interrogating whatever is in the path, 97 | but that won't actually pick up the stuff in the `muslrust` image. So `rust-cache` is not the right choice here, and we can and should just use something very simple like 98 | 99 | ``` 100 | - name: Cache muslrust cargo registry and sccache dir 101 | # https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows 102 | uses: actions/cache@v3 103 | with: 104 | path: /tmp/muslrust 105 | key: v1-sccache 106 | restore-keys: v1-sccache 107 | ``` 108 | 109 | and set `MOUNT_ROOT` to `/tmp/muslrust` in CI. (`$HOME` may or may not work correctly in gha). 110 | 111 | The only reason to get fancier with the gha cache keys here is if you have lots of jobs using this and for some reason you don't expect them to be able to share artifacts. 112 | For example, if you are using `muslrust:stable` and `muslrust:nightly`, probably nothing at all can be shared between these builds so you might as well use separate github cache keys for those. 113 | That's an efficiency consideration -- there's no point to have the `muslrust:nightly` job download a bunch of cached artifacts from the `muslrust:stable` job that there's no way it can get hits on, 114 | it will just slow down your CI a bit because it downloads stuff it doesn't need. But it's not a correctness consideration. Even if the `muslrust:nightly` stuff got into your `SCCACHE_DIR` on a job 115 | using `muslrust:stable`, it shouldn't cause a bad build, because `sccache` caching is sound. 116 | 117 | Note that per docu, github has a repository limit of 10G in total for all caches created this way. I suggest using 5G as the `SCCACHE_CACHE_SIZE` and leaving some G's for the `.cargo/registry`, but ymmv. 
118 | 119 | ### Post-build command 120 | 121 | As described in the main [`README.md`](./README.md), on linux the build is going to run as root in the `muslrust` image and so any files it produces will be owned by root, if they are mounted into the container. 122 | For several reasons that can become annoying, and a quick `chown` fixes it. 123 | 124 | Here we're adding a `POST_BUILD_COMMAND` that changes ownership not only for the `target` directory, but also the cargo registry and sccache directories. This is because the github `actions/cache` action will fail 125 | to save and restore files owned by root. 126 | 127 | On a mac, docker works differently, so if you are using the example command there, the files won't actually be owned by root, and also the `chown` command will be very slow. So on mac it is better to either skip 128 | the `POST_BUILD_CMD`, or you could modify it so that it actually tests if we are root before doing the `chown`. 129 | -------------------------------------------------------------------------------- /check_stable.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # check_stable.py 3 | # 4 | # Retrieve latest stable version from static.rust-lang.org 5 | # Compare the stable version to ensure we have a corresponding docker tag 6 | # 7 | # If we have not built it, print the version we need to build and exit 0 8 | # If we have built it, exit 1 9 | 10 | import urllib.request as urllib 11 | import json 12 | import toml 13 | import sys 14 | 15 | # Dockerhub repo to compare rust-lang release with 16 | DOCKERHUB_REPO="clux/muslrust" 17 | 18 | def rust_stable_version(): 19 | """Retrieve the latest rust stable version from static.rust-lang.org""" 20 | url = 'https://static.rust-lang.org/dist/channel-rust-stable.toml' 21 | req = urllib.urlopen(url) 22 | data = toml.loads(req.read().decode("utf-8")) 23 | req.close() 24 | return data['pkg']['rust']['version'].split()[0] 25 | 26 | def 
tag_exists(tag): 27 | """Retrieve our built tags and check we have built a given one""" 28 | (namespace, repo) = DOCKERHUB_REPO.split("/") 29 | url = f'https://registry.hub.docker.com/v2/namespaces/{namespace}/repositories/{repo}/tags' 30 | req = urllib.urlopen(url) 31 | data = json.loads(req.read()) 32 | req.close() 33 | for x in data['results']: 34 | if x['name'] == tag: 35 | return True 36 | return False 37 | 38 | 39 | if __name__ == '__main__': 40 | latest_stable = rust_stable_version() 41 | stable_tag = f'{latest_stable}-stable' 42 | if tag_exists(stable_tag): 43 | print(f'tag {stable_tag} already built') 44 | sys.exit(1) 45 | else: 46 | print(f'need to build {latest_stable}') 47 | sys.exit(0) 48 | -------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | 2 | [private] 3 | default: 4 | @just --list --unsorted --color=always 5 | 6 | _build channel platform: 7 | docker build --build-arg CHANNEL="{{channel}}" --platform="{{platform}}" -t rustmusl-temp . 8 | # Build the stable x86 container 9 | build-stable-amd: (_build "stable" "linux/amd64") 10 | # Build the nightly x86 container 11 | build-nightly-amd: (_build "nightly" "linux/amd64") 12 | # Build the stable arm container 13 | build-stable-arm: (_build "stable" "linux/arm64") 14 | # Build the nightly arm container 15 | build-nightly-arm: (_build "nightly" "linux/arm64") 16 | 17 | # Shell into the built container 18 | run: 19 | docker run -v $PWD/test:/volume -w /volume -it rustmusl-temp /bin/bash 20 | 21 | # Build test runner 22 | test-setup: 23 | docker build -t test-runner . 
-f Dockerfile.test-runner 24 | 25 | # Test an individual crate against built container 26 | _t crate: 27 | ./test.sh {{crate}} 28 | 29 | # Test an individual crate locally using env vars set by _t_amd or t_arm 30 | _ti crate: 31 | # poor man's environment multiplex 32 | just _t_{{ os() }}_{{ arch() }} {{crate}} 33 | 34 | # when running locally we can use one of these instead of _t 35 | _t_linux_x86_64 crate: 36 | #!/bin/bash 37 | export PLATFORM="linux/amd64" 38 | export TARGET_DIR="x86_64-unknown-linux-musl" 39 | ./test.sh {{crate}} 40 | _t_macos_aarch64 crate: 41 | #!/bin/bash 42 | export PLATFORM="linux/arm64" 43 | export TARGET_DIR="aarch64-unknown-linux-musl" 44 | ./test.sh {{crate}} 45 | 46 | # Test all crates against built container locally 47 | test: (_ti "plain") (_ti "serde") (_ti "zlib") (_ti "hypertls") (_ti "dieselsqlite") 48 | # Test all crates against built container in ci (inheriting set PLATFORM/TARGET_DIR/AR vars) 49 | test-ci: (_t "plain") (_t "serde") (_t "zlib") (_t "hypertls") (_t "dieselsqlite") 50 | 51 | # Cleanup everything 52 | clean: clean-docker clean-tests 53 | 54 | # Cleanup docker images with clux/muslrust name 55 | clean-docker: 56 | docker images clux/muslrust -q | xargs -r docker rmi -f 57 | docker builder prune --all 58 | 59 | # Cleanup test artifacts 60 | clean-tests: 61 | sudo find . -iname Cargo.lock -exec rm {} \; 62 | sudo find . 
-mindepth 3 -maxdepth 3 -name target -exec rm -rf {} \; 63 | sudo rm -f test/dieselsqlitecrate/main.db 64 | -------------------------------------------------------------------------------- /renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | $schema: "https://docs.renovatebot.com/renovate-schema.json", 3 | extends: [ 4 | "config:base", 5 | ":gitSignOff", 6 | ":disableDependencyDashboard", 7 | "customManagers:dockerfileVersions", // _VERSION vars in Dockerfiles 8 | ], 9 | packageRules: [], 10 | } 11 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex -o pipefail 3 | 4 | # Common vars: 5 | CRATE_NAME="${1}crate" 6 | CRATE_PATH="./test/${CRATE_NAME}" 7 | 8 | # Build and verify successful static compilation of a crate: 9 | function docker_build() { 10 | echo "Target dir: ${TARGET_DIR}" 11 | echo "Platform: ${PLATFORM}" 12 | 13 | # NB: add -vv to cargo build when debugging 14 | docker run --rm \ 15 | --env RUST_BACKTRACE=1 \ 16 | --volume "${CRATE_PATH}:/volume" \ 17 | --volume cargo-cache:/opt/cargo/registry \ 18 | --platform "${PLATFORM}" \ 19 | rustmusl-temp \ 20 | cargo build 21 | 22 | # Verify the build artifact works and is statically linked: 23 | # (A container is used for `ldd` so that a non-native platform can also be tested) 24 | local CRATE_ARTIFACT="./target/${TARGET_DIR}/debug/${CRATE_NAME}" 25 | docker run --rm \ 26 | --env RUST_BACKTRACE=1 \ 27 | --volume "${CRATE_PATH}:/volume" \ 28 | --workdir /volume \ 29 | --platform "${PLATFORM}" \ 30 | test-runner \ 31 | bash -ex -c " 32 | '${CRATE_ARTIFACT}' 33 | ldd '${CRATE_ARTIFACT}' 2>&1 \ 34 | | grep -qE 'not a dynamic|statically linked' \ 35 | && echo '${CRATE_NAME} is a static executable' 36 | " 37 | } 38 | 39 | # Reference - Helpers to locally compare builds from alternative images (x86_64 arch only): 40 | 
# - https://github.com/emk/rust-musl-builder (ss1, abandoned) 41 | # - https://gitlab.com/rust_musl_docker/image (ss1, abandoned) 42 | # - https://github.com/BlackDex/rust-musl (active, supports ssl 3.X) 43 | function docker_build_other_image() { 44 | docker run --rm -it \ 45 | --env RUST_BACKTRACE=1 \ 46 | --volume "${CRATE_PATH}:/home/rust/src" \ 47 | --volume cargo-cache:/home/rust/.cargo \ 48 | blackdex/rust-musl:x86_64-musl-stable \ 49 | cargo build -vv 50 | 51 | check_crate_build_locally 52 | } 53 | 54 | # Verify the build artifact works and is statically linked: 55 | function check_crate_build_locally() { 56 | local CRATE_ARTIFACT="${CRATE_PATH}/target/x86_64-unknown-linux-musl/debug/${CRATE_NAME}" 57 | 58 | "${CRATE_ARTIFACT}" 59 | ldd "${CRATE_ARTIFACT}" 2>&1 \ 60 | | grep -qE 'not a dynamic|statically linked' \ 61 | && echo "${CRATE_NAME} is a static executable" 62 | } 63 | 64 | docker_build 65 | -------------------------------------------------------------------------------- /test/dieselsqlitecrate/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ["clux "] 3 | name = "dieselsqlitecrate" 4 | version = "0.1.0" 5 | 6 | [dependencies] 7 | diesel = { version = "2.2.*", features = ["sqlite"] } 8 | -------------------------------------------------------------------------------- /test/dieselsqlitecrate/src/main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate diesel; 3 | 4 | mod schema { 5 | table! { 6 | posts (id) { 7 | id -> Int4, 8 | title -> Varchar, 9 | body -> Text, 10 | published -> Bool, 11 | } 12 | } 13 | } 14 | 15 | mod models { 16 | use schema::posts; 17 | #[derive(Queryable)] 18 | pub struct Post { 19 | pub id: i32, 20 | pub title: String, 21 | pub body: String, 22 | pub published: bool, 23 | } 24 | 25 | // apparently this can be done without heap storage, but lifetimes spread far.. 
26 |     #[derive(Insertable)]
27 |     #[diesel(table_name = posts)]
28 |     pub struct NewPost {
29 |         pub title: String,
30 |         pub body: String,
31 |     }
32 | }
33 | 
34 | use diesel::prelude::*;
35 | use diesel::sqlite::SqliteConnection;
36 | 
37 | fn main() {
38 |     let database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| "main.db".into()); // lazy default: avoid allocating when the env var is set
39 |     SqliteConnection::establish(&database_url).unwrap();
40 | }
41 | 
--------------------------------------------------------------------------------
/test/hypertlscrate/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = ["clux "] # NOTE(review): author email looks stripped by extraction — confirm against upstream
3 | name = "hypertlscrate"
4 | version = "0.2.0"
5 | edition = "2024"
6 | 
7 | [dependencies]
8 | hyper = { version = "1.6", features=["client", "http1"] }
9 | tokio = { version = "1.45.0", features=["macros", "io-std", "io-util", "rt-multi-thread"] }
10 | hyper-rustls = "0.27.5"
11 | http-body-util = "0.1.3"
12 | hyper-util = "0.1.12"
13 | 
--------------------------------------------------------------------------------
/test/hypertlscrate/src/main.rs:
--------------------------------------------------------------------------------
1 | //- Example from https://docs.rs/hyper-rustls/latest/hyper_rustls/
2 | use http_body_util::Empty;
3 | use hyper::body::Bytes;
4 | use hyper::http::StatusCode;
5 | use hyper_util::client::legacy::Client;
6 | use hyper_util::rt::TokioExecutor;
7 | 
8 | #[tokio::main]
9 | async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> { // restored: `<dyn ...>` was eaten by extraction; bare `Box` is not a valid error type here
10 |     let url = ("https://raw.githubusercontent.com/clux/muslrust/master/README.md")
11 |         .parse() // inferred as hyper::Uri from the `client.get(url)` call below
12 |         .unwrap();
13 | 
14 |     let https = hyper_rustls::HttpsConnectorBuilder::new()
15 |         .with_native_roots() // use the platform's native root CA store
16 |         .expect("no native root CA certificates found")
17 |         .https_only()
18 |         .enable_http1()
19 |         .build();
20 | 
21 |     let client: Client<_, Empty<Bytes>> = Client::builder(TokioExecutor::new()).build(https); // restored `<Bytes>`: Empty needs its data type parameter
22 | 
23 |     let res = client.get(url).await.unwrap();
24 |     assert_eq!(res.status(), StatusCode::OK);
25 | 
26 |     Ok(())
27 | }
28 | 
--------------------------------------------------------------------------------
/test/pkgconf/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = ["clux "] # NOTE(review): author email looks stripped by extraction — confirm against upstream
3 | name = "pkgconf"
4 | version = "0.1.0"
5 | 
6 | [dependencies]
7 | pkg-config = "*"
8 | 
--------------------------------------------------------------------------------
/test/pkgconf/src/main.rs:
--------------------------------------------------------------------------------
1 | extern crate pkg_config;
2 | 
3 | use std::env;
4 | use std::process;
5 | 
6 | fn main() {
7 |     if let Ok(info) = pkg_config::find_library("openssl") {
8 |         let paths = env::join_paths(info.include_paths).unwrap();
9 |         println!("cargo:include={}", paths.to_str().unwrap()); // build-script style output of the discovered include paths
10 |         process::exit(0); // openssl found via pkg-config
11 |     }
12 |     process::exit(1); // openssl not discoverable
13 | }
14 | 
--------------------------------------------------------------------------------
/test/plaincrate/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = ["clux "] # NOTE(review): author email looks stripped by extraction — confirm against upstream
3 | name = "plaincrate"
4 | version = "0.1.0"
5 | edition = "2024"
6 | 
7 | [dependencies]
8 | rand = "*"
9 | 
--------------------------------------------------------------------------------
/test/plaincrate/src/main.rs:
--------------------------------------------------------------------------------
1 | extern crate rand;
2 | 
3 | fn main() {
4 |     let nr = rand::random::<u32>(); // was `<(u32)>`: redundant parens around the type (rustc unused_parens)
5 |     println!("Hello, visitor number {}", nr);
6 | }
7 | 
--------------------------------------------------------------------------------
/test/serdecrate/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = ["clux "] # NOTE(review): author email looks stripped by extraction — confirm against upstream
3 | name = "serdecrate"
4 | version = "0.1.0"
5 | edition = "2024"
6 | 
7 | [dependencies]
8 | serde = "1.0.15"
9 | serde_derive = "1.0.15"
10 | serde_json = "1.0.4"
11 | tikv-jemallocator = "0.5.4"
12 | 
--------------------------------------------------------------------------------
/test/serdecrate/src/main.rs:
--------------------------------------------------------------------------------
1 | use serde_derive::{Deserialize, Serialize};
2 | use serde_json;
3 | 
4 | use tikv_jemallocator::Jemalloc;
5 | #[global_allocator]
6 | static GLOBAL: Jemalloc = Jemalloc; // route all heap allocations through jemalloc
7 | 
8 | #[derive(Serialize, Deserialize, Debug)]
9 | struct Point {
10 |     x: i32,
11 |     y: i32,
12 | }
13 | 
14 | fn main() {
15 |     let point = Point { x: 1, y: 2 };
16 | 
17 |     // Convert the Point to a JSON string.
18 |     let serialized = serde_json::to_string(&point).unwrap();
19 | 
20 |     // Prints serialized = {"x":1,"y":2}
21 |     println!("serialized = {}", serialized);
22 | 
23 |     // Convert the JSON string back to a Point.
24 |     let deserialized: Point = serde_json::from_str(&serialized).unwrap();
25 | 
26 |     // Prints deserialized = Point { x: 1, y: 2 }
27 |     println!("deserialized = {:?}", deserialized);
28 | }
29 | 
--------------------------------------------------------------------------------
/test/zlibcrate/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = ["clux "] # NOTE(review): author email looks stripped by extraction — confirm against upstream
3 | name = "zlibcrate"
4 | version = "0.1.0"
5 | edition = "2024"
6 | 
7 | [dependencies]
8 | flate2 = "*"
9 | tar = "*"
10 | 
--------------------------------------------------------------------------------
/test/zlibcrate/data.txt:
--------------------------------------------------------------------------------
1 | hi
2 | 
--------------------------------------------------------------------------------
/test/zlibcrate/src/main.rs:
--------------------------------------------------------------------------------
1 | extern crate tar;
2 | extern crate flate2;
3 | 
4 | use std::io::{self, Read};
5 | use std::fs::{self, File};
6 | use std::path::{Path, PathBuf};
7 | use std::env;
8 | use std::process;
9 | 
10 | fn decompress(tarpath: PathBuf, extract_path: PathBuf) -> io::Result<()> { // unpack a gzipped tarball into extract_path
11 |     use flate2::read::GzDecoder;
12 |     use tar::Archive;
13 | 
14 |     let tarball = fs::File::open(tarpath)?;
15 |     let decompressed = GzDecoder::new(tarball);
16 |     let mut archive = Archive::new(decompressed);
17 | 
18 |     fs::create_dir_all(&extract_path)?;
19 |     archive.unpack(&extract_path)?;
20 | 
21 |     Ok(())
22 | }
23 | 
24 | fn compress(input_file: &str, output_file: PathBuf) -> io::Result<()> { // tar + gzip input_file into output_file
25 |     use flate2::write::GzEncoder;
26 |     use flate2::Compression;
27 |     use tar::Builder;
28 | 
29 |     let file = File::create(&output_file)?;
30 |     let mut encoder = GzEncoder::new(file, Compression::default());
31 |     let mut builder = Builder::new(&mut encoder);
32 | 
33 |     builder.append_path(input_file)?;
34 | 
35 |     builder.finish()?; // write the tar end-of-archive blocks explicitly instead of via Drop
36 |     drop(builder); // release the &mut borrow so the encoder can be consumed
37 |     encoder.finish()?; // flush the gzip trailer; relying on Drop would silently swallow I/O errors
38 |     Ok(())
39 | }
40 | 
41 | fn verify(res: io::Result<()>) { // print the error and abort on failure
42 |     let _ = res.map_err(|e| {
43 |         println!("error: {}", e);
44 |         process::exit(1);
45 |     });
46 | }
47 | 
48 | fn main() {
49 |     let pwd = env::current_dir().unwrap();
50 |     let data = "./data.txt";
51 |     let tarpath = Path::new(&pwd).join("data.tar.gz");
52 |     let extractpath = Path::new(&pwd).join("output");
53 |     verify(compress(data, tarpath.clone()));
54 |     println!("Compressed data");
55 | 
56 |     verify(decompress(tarpath, extractpath));
57 |     println!("Decompressed data");
58 | 
59 |     let mut f = File::open(Path::new(&pwd).join("output").join("data.txt")).unwrap();
60 |     let mut text = String::new();
61 |     f.read_to_string(&mut text).unwrap();
62 | 
63 |     assert_eq!(&text, "hi\n"); // round-trip must reproduce data.txt exactly
64 |     println!("Verified data");
65 | }
66 | 
--------------------------------------------------------------------------------