├── .github └── workflows │ ├── backblaze.yml.disable │ └── ci.yml ├── .gitignore ├── .hlint.yaml ├── .stylish-haskell.yaml ├── .vscode └── tasks.json ├── CHANGELOG.md ├── LICENSE ├── README.md ├── Setup.hs ├── app ├── App │ ├── Amazonka.hs │ ├── Commands.hs │ ├── Commands │ │ ├── Debug.hs │ │ ├── Debug │ │ │ ├── S3.hs │ │ │ └── S3 │ │ │ │ └── Cp.hs │ │ ├── Options │ │ │ ├── Parser.hs │ │ │ └── Types.hs │ │ ├── Plan.hs │ │ ├── SyncFromArchive.hs │ │ ├── SyncToArchive.hs │ │ └── Version.hs │ ├── Run.hs │ ├── Static.hs │ └── Static │ │ ├── Base.hs │ │ ├── Posix.hs │ │ └── Windows.hs └── Main.hs ├── cabal-cache.cabal ├── cabal.project ├── src └── HaskellWorks │ └── CabalCache │ ├── AWS │ ├── Env.hs │ ├── Error.hs │ ├── S3.hs │ └── S3 │ │ └── URI.hs │ ├── AppError.hs │ ├── Concurrent │ ├── DownloadQueue.hs │ ├── Fork.hs │ └── Type.hs │ ├── Core.hs │ ├── Data │ └── List.hs │ ├── Error.hs │ ├── Exit.hs │ ├── GhcPkg.hs │ ├── Hash.hs │ ├── IO │ ├── Console.hs │ ├── File.hs │ ├── Lazy.hs │ └── Tar.hs │ ├── Location.hs │ ├── Metadata.hs │ ├── Options.hs │ ├── Store.hs │ ├── Text.hs │ ├── Topology.hs │ ├── Types.hs │ ├── URI.hs │ └── Version.hs └── test ├── HaskellWorks └── CabalCache │ ├── AwsSpec.hs │ ├── IntegrationSpec.hs │ ├── LocationSpec.hs │ └── QuerySpec.hs ├── Spec.hs ├── Test └── Base.hs ├── test-missing-all.sh └── test-missing.sh /.github/workflows/backblaze.yml.disable: -------------------------------------------------------------------------------- 1 | name: Binaries (Backblaze) 2 | 3 | defaults: 4 | run: 5 | shell: bash 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | pull_request: 12 | 13 | jobs: 14 | build: 15 | runs-on: ${{ matrix.os }} 16 | 17 | environment: rwld@cache.haskellworks.io 18 | 19 | strategy: 20 | fail-fast: false 21 | matrix: 22 | ghc: ["8.10.7"] 23 | os: [ubuntu-latest] 24 | 25 | steps: 26 | - uses: actions/checkout@v2 27 | 28 | - uses: haskell/actions/setup@v2 29 | id: setup-haskell 30 | with: 31 | ghc-version: ${{ matrix.ghc }} 32 | cabal-version: 3.8.1.0 33 | 34 | - name: Set some Windows-specific things 35 | if: matrix.os == 'windows-latest' 36 | run: echo 'EXE_EXT=.exe' >> $GITHUB_ENV 37 | 38 | - name: Configure project 39 | run: | 40 | cabal configure --enable-tests --enable-benchmarks --write-ghc-environment-files=ghc8.4.4+ 41 | cabal build all --dry-run 42 | 43 | - name: Cabal cache over S3 44 | uses: action-works/cabal-cache-s3@v1 45 | env: 46 | AWS_ACCESS_KEY_ID: ${{ secrets.BACKBLAZE_AWS_ACCESS_KEY_ID }} 47 | AWS_SECRET_ACCESS_KEY: ${{ secrets.BACKBLAZE_AWS_SECRET_ACCESS_KEY }} 48 | with: 49 | region: us-west-2 50 | dist-dir: dist-newstyle 51 | store-path: ${{ steps.setup-haskell.outputs.cabal-store }} 52 | threads: 16 53 | archive-uri: ${{ secrets.BACKBLAZE_BINARY_CACHE_URI }} 54 | host-name: s3.us-west-004.backblazeb2.com 55 | host-port: 443 56 | host-ssl: "True" 57 | skip: "${{ secrets.BACKBLAZE_BINARY_CACHE_URI == '' }}" 58 | 59 | - name: Cabal cache over HTTPS 60 | uses: action-works/cabal-cache-s3@v1 61 | with: 62 | dist-dir: dist-newstyle 63 | store-path: ${{ steps.setup-haskell.outputs.cabal-store }} 64 | threads: 16 65 | archive-uri: https://cache.haskellworks.io/archive 66 | 67 | - name: Build 68 | # Try building it twice in case of flaky builds on Windows 69 | run: | 70 | cabal build all --enable-tests --enable-benchmarks --write-ghc-environment-files=ghc8.4.4+ || \ 71 | cabal build all --enable-tests --enable-benchmarks --write-ghc-environment-files=ghc8.4.4+ -j1 72 | 73 | - name: Test 74 | run: | 75 | cabal test all --enable-tests --enable-benchmarks 
--write-ghc-environment-files=ghc8.4.4+ 76 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Binaries 2 | 3 | defaults: 4 | run: 5 | shell: bash 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | tags: 12 | - 'v*' 13 | pull_request: 14 | workflow_dispatch: 15 | inputs: 16 | ref: 17 | description: "The commit, branch, or tag to build" 18 | required: true 19 | default: "main" 20 | 21 | jobs: 22 | build: 23 | runs-on: ${{ matrix.os }} 24 | 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | ghc: ["9.6.6", "9.4.8"] 29 | os: [ubuntu-latest, windows-latest] 30 | include: 31 | - ghc: "9.6.6" 32 | os: macOS-latest 33 | 34 | permissions: 35 | contents: write 36 | 37 | steps: 38 | - uses: actions/checkout@v4 39 | with: 40 | ref: ${{ github.event.inputs.ref }} # Checkout the specified branch, tag, or commit 41 | 42 | - uses: haskell-actions/setup@v2 43 | id: setup-haskell 44 | with: 45 | ghc-version: ${{ matrix.ghc }} 46 | cabal-version: 3.12.1.0 47 | 48 | - name: Set some Windows-specific things 49 | if: matrix.os == 'windows-latest' 50 | run: echo 'EXE_EXT=.exe' >> $GITHUB_ENV 51 | 52 | - name: Cabal update 53 | run: cabal update 54 | 55 | - name: Configure project 56 | run: | 57 | cabal configure --enable-tests --enable-benchmarks --write-ghc-environment-files=ghc8.4.4+ 58 | cabal build all --dry-run 59 | 60 | - name: Cabal cache over S3 61 | uses: action-works/cabal-cache-s3@v1 62 | env: 63 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 64 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 65 | BINARY_CACHE_URI: ${{ vars.BINARY_CACHE_URI }} 66 | with: 67 | region: us-west-2 68 | dist-dir: dist-newstyle 69 | store-path: ${{ steps.setup-haskell.outputs.cabal-store }} 70 | threads: 16 71 | archive-uri: ${{ env.BINARY_CACHE_URI }} 72 | skip: "${{ env.BINARY_CACHE_URI == '' }}" 73 | 74 | - name: Cabal cache over HTTPS 75 | uses: action-works/cabal-cache-s3@v1 76 | with: 77 | dist-dir: dist-newstyle 78 | store-path: ${{ steps.setup-haskell.outputs.cabal-store }} 79 | threads: 16 80 | archive-uri: https://cache.haskellworks.io/archive 81 | 82 | - name: Build 83 | run: | 84 | cabal build all --enable-tests --enable-benchmarks 85 | 86 | - name: Test 87 | env: 88 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 89 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 90 | run: | 91 | cabal test all --enable-tests --enable-benchmarks --write-ghc-environment-files=ghc8.4.4+ 92 | 93 | - name: Compress Binary 94 | id: compress_binary 95 | env: 96 | GHC_VER: ${{ matrix.ghc }} 97 | run: | 98 | HS_BIN=$(find dist-newstyle \( -name 'cabal-cache' -o -name 'cabal-cache.exe' \) -type f | head -n 1) 99 | test -f "$HS_BIN" 100 | NAME="cabal-cache" 101 | 102 | case ${{ matrix.os }} in 103 | ubuntu-*) os="linux" ;; 104 | macOS-*) os="darwin" ;; 105 | windows-*) os="windows" ;; 106 | *) exit 1 ;; # error 107 | esac 108 | arch_os="$(uname -m)-$os" 109 | 110 | cp "$HS_BIN" "$NAME${{env.EXE_EXT}}" 111 | [ "$OS" != Windows_NT ] && strip "$NAME${{env.EXE_EXT}}" 112 | 113 | mkdir -p artifacts 114 | 115 | 7z a "artifacts/$NAME-$arch_os.zip" "$NAME${{env.EXE_EXT}}" 116 | echo "path_zip=$NAME.zip" >> $GITHUB_OUTPUT 117 | 118 | gzip -c --best "$NAME${{env.EXE_EXT}}" > artifacts/$NAME-$arch_os${{env.EXE_EXT}}.gz 119 | echo "path_gzip=$NAME.gz" >> $GITHUB_OUTPUT 120 | 121 | tar zcvf "artifacts/$NAME-$arch_os.tar.gz" "$NAME${{env.EXE_EXT}}" 122 | echo 
"path_targz=$NAME.targz" >> $GITHUB_OUTPUT 123 | 124 | - uses: actions/upload-artifact@v4 125 | with: 126 | name: cabal-cache-${{ runner.OS }}-${{ matrix.ghc }} 127 | path: artifacts 128 | 129 | check: 130 | needs: build 131 | runs-on: ubuntu-latest 132 | outputs: 133 | tag: ${{ steps.tag.outputs.tag }} 134 | 135 | steps: 136 | - uses: actions/checkout@v4 137 | 138 | - name: Check if cabal project is sane 139 | run: | 140 | PROJECT_DIR=$PWD 141 | mkdir -p $PROJECT_DIR/build/sdist 142 | for i in $(git ls-files | grep '\.cabal'); do 143 | cd $PROJECT_DIR && cd `dirname $i` 144 | cabal check 145 | done 146 | 147 | - name: Tag new version 148 | id: tag 149 | if: ${{ github.ref == 'refs/heads/main' }} 150 | env: 151 | server: http://hackage.haskell.org 152 | username: ${{ secrets.HACKAGE_USER }} 153 | # password: ${{ secrets.HACKAGE_PASS }} 154 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 155 | run: | 156 | package_version="$(cat *.cabal | grep '^version:' | cut -d : -f 2 | xargs)" 157 | 158 | echo "Package version is v$package_version" 159 | 160 | git fetch --unshallow origin 161 | 162 | if git tag "v$package_version"; then 163 | echo "Tagging with new version "v$package_version"" 164 | 165 | if git push origin "v$package_version"; then 166 | echo "Tagged with new version "v$package_version"" 167 | 168 | echo "tag=v$package_version" >> $GITHUB_OUTPUT 169 | fi 170 | fi 171 | 172 | release: 173 | needs: [build, check] 174 | runs-on: ubuntu-latest 175 | 176 | if: ${{ needs.check.outputs.tag != '' }} 177 | 178 | outputs: 179 | upload_url: ${{ steps.create_release.outputs.upload_url }} 180 | 181 | steps: 182 | - uses: actions/checkout@v4 183 | 184 | - name: Create source distribution 185 | run: | 186 | PROJECT_DIR=$PWD 187 | mkdir -p $PROJECT_DIR/build/sdist 188 | for i in $(git ls-files | grep '\.cabal'); do 189 | cd $PROJECT_DIR && cd `dirname $i` 190 | cabal v2-sdist -o $PROJECT_DIR/build/sdist 191 | done; 192 | 193 | - name: Publish to hackage 194 | env: 195 | server: http://hackage.haskell.org 196 | username: ${{ secrets.HACKAGE_USER }} 197 | password: ${{ secrets.HACKAGE_PASS }} 198 | candidate: true 199 | run: | 200 | package_version="$(cat *.cabal | grep '^version:' | cut -d : -f 2 | xargs)" 201 | 202 | for PACKAGE_TARBALL in $(find ./build/sdist/ -name "*.tar.gz"); do 203 | PACKAGE_NAME=$(basename ${PACKAGE_TARBALL%.*.*}) 204 | 205 | if ${{ env.candidate }}; then 206 | TARGET_URL="${{ env.server }}/packages/candidates"; 207 | DOCS_URL="${{ env.server }}/package/$PACKAGE_NAME/candidate/docs" 208 | else 209 | TARGET_URL="${{ env.server }}/packages/upload"; 210 | DOCS_URL="${{ env.server }}/package/$PACKAGE_NAME/docs" 211 | fi 212 | 213 | HACKAGE_STATUS=$(curl --silent --head -w %{http_code} -XGET --anyauth --user "${{ env.username }}:${{ env.password }}" ${{ env.server }}/package/$PACKAGE_NAME -o /dev/null) 214 | 215 | if [ "$HACKAGE_STATUS" = "404" ]; then 216 | echo "Uploading $PACKAGE_NAME to $TARGET_URL" 217 | 218 | curl -X POST -f --user "${{ env.username }}:${{ env.password }}" $TARGET_URL -F "package=@$PACKAGE_TARBALL" 219 | echo "Uploaded $PACKAGE_NAME" 220 | else 221 | echo "Package $PACKAGE_NAME" already exists on Hackage. 
222 | fi 223 | done 224 | 225 | - name: Debug 226 | run: | 227 | echo "tag=${{ needs.check.outputs.tag }}" 228 | 229 | - name: Create Release 230 | id: create_release 231 | uses: actions/create-release@v1 232 | env: 233 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 234 | with: 235 | tag_name: ${{ needs.check.outputs.tag }} 236 | release_name: Release ${{ needs.check.outputs.tag }} 237 | body: Undocumented 238 | draft: true 239 | prerelease: false 240 | 241 | publish: 242 | needs: [build, check, release] 243 | 244 | runs-on: ${{ matrix.os }} 245 | 246 | if: ${{ needs.check.outputs.tag != '' }} 247 | 248 | strategy: 249 | fail-fast: false 250 | matrix: 251 | ghc: ["9.6.6"] 252 | os: [ubuntu-latest, macOS-latest, windows-latest] 253 | 254 | steps: 255 | - uses: actions/download-artifact@v4 256 | id: download_artifact 257 | with: 258 | name: cabal-cache-${{ runner.OS }}-${{ matrix.ghc }} 259 | path: artifacts 260 | 261 | - name: List artifacts 262 | run: ls artifacts 263 | 264 | - name: Compute axes 265 | id: axes 266 | run: | 267 | case ${{ matrix.os }} in 268 | ubuntu-*) os="linux" ;; 269 | macOS-*) os="darwin" ;; 270 | windows-*) os="windows" ;; 271 | *) exit 1 ;; # error 272 | esac 273 | arch_os="$(uname -m)-$os" 274 | echo "arch_os=$arch_os" >> $GITHUB_OUTPUT 275 | 276 | - name: Set some Windows-specific things 277 | if: matrix.os == 'windows-latest' 278 | run: echo 'EXE_EXT=.exe' >> $GITHUB_ENV 279 | 280 | - name: Upload Release Binary 281 | uses: actions/upload-release-asset@v1.0.2 282 | env: 283 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 284 | with: 285 | upload_url: ${{ needs.release.outputs.upload_url }} 286 | asset_path: artifacts/cabal-cache-${{ steps.axes.outputs.arch_os }}${{env.EXE_EXT}}.gz 287 | asset_name: cabal-cache-${{ steps.axes.outputs.arch_os }}.gz 288 | asset_content_type: application/gzip 289 | 290 | - name: Upload Release Binary 291 | uses: actions/upload-release-asset@v1.0.2 292 | env: 293 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 294 | with: 295 | upload_url: ${{ needs.release.outputs.upload_url }} 296 | asset_path: artifacts/cabal-cache-${{ steps.axes.outputs.arch_os }}.tar.gz 297 | asset_name: cabal-cache-${{ steps.axes.outputs.arch_os }}.tar.gz 298 | asset_content_type: application/gzip 299 | 300 | - name: Upload Release Binary 301 | uses: actions/upload-release-asset@v1.0.2 302 | env: 303 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 304 | with: 305 | upload_url: ${{ needs.release.outputs.upload_url }} 306 | asset_path: artifacts/cabal-cache-${{ steps.axes.outputs.arch_os }}.zip 307 | asset_name: cabal-cache-${{ steps.axes.outputs.arch_os }}.zip 308 | asset_content_type: application/zip 309 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | dist-* 3 | cabal-dev 4 | *.o 5 | *.hi 6 | *.chi 7 | *.chs.h 8 | *.dyn_o 9 | *.dyn_hi 10 | .hpc 11 | .hsenv 12 | .cabal-sandbox/ 13 | cabal.sandbox.config 14 | *.prof 15 | *.aux 16 | *.hp 17 | *.eventlog 18 | .stack-work/ 19 | cabal.project.local 20 | cabal.project.local~ 21 | .HTF/ 22 | .ghc.environment.* 23 | 24 | .vscode/ipch 25 | /hie.yaml 26 | -------------------------------------------------------------------------------- /.hlint.yaml: -------------------------------------------------------------------------------- 1 | # HLint configuration file 2 | # https://github.com/ndmitchell/hlint 3 | ########################## 4 | 5 | # This file contains a template configuration file, which is typically 6 | # 
placed as .hlint.yaml in the root of your project 7 | 8 | 9 | # Warnings currently triggered by your code 10 | - ignore: {name: "Unused LANGUAGE pragma"} 11 | - ignore: {name: "Move brackets to avoid $"} 12 | - ignore: {name: "Monoid law, left identity"} 13 | 14 | 15 | # Specify additional command line arguments 16 | # 17 | # - arguments: [--color, --cpp-simple, -XQuasiQuotes] 18 | 19 | 20 | # Control which extensions/flags/modules/functions can be used 21 | # 22 | # - extensions: 23 | # - default: false # all extensions are banned by default 24 | # - name: [PatternGuards, ViewPatterns] # only these listed extensions can be used 25 | # - {name: CPP, within: CrossPlatform} # CPP can only be used in a given module 26 | # 27 | # - flags: 28 | # - {name: -w, within: []} # -w is allowed nowhere 29 | # 30 | # - modules: 31 | # - {name: [Data.Set, Data.HashSet], as: Set} # if you import Data.Set qualified, it must be as 'Set' 32 | # - {name: Control.Arrow, within: []} # Certain modules are banned entirely 33 | # 34 | # - functions: 35 | # - {name: unsafePerformIO, within: []} # unsafePerformIO can only appear in no modules 36 | 37 | 38 | # Add custom hints for this project 39 | # 40 | # Will suggest replacing "wibbleMany [myvar]" with "wibbleOne myvar" 41 | # - error: {lhs: "wibbleMany [x]", rhs: wibbleOne x} 42 | 43 | # The hints are named by the string they display in warning messages. 44 | # For example, if you see a warning starting like 45 | # 46 | # Main.hs:116:51: Warning: Redundant == 47 | # 48 | # You can refer to that hint with `{name: Redundant ==}` (see below). 49 | 50 | # Turn on hints that are off by default 51 | # 52 | # Ban "module X(module X) where", to require a real export list 53 | # - warn: {name: Use explicit module export list} 54 | # 55 | # Replace a $ b $ c with a . b $ c 56 | # - group: {name: dollar, enabled: true} 57 | # 58 | # Generalise map to fmap, ++ to <> 59 | # - group: {name: generalise, enabled: true} 60 | 61 | 62 | # Ignore some builtin hints 63 | # - ignore: {name: Use let} 64 | # - ignore: {name: Use const, within: SpecialModule} # Only within certain modules 65 | 66 | 67 | # Define some custom infix operators 68 | # - fixity: infixr 3 ~^#^~ 69 | 70 | 71 | # To generate a suitable file for HLint do: 72 | # $ hlint --default > .hlint.yaml 73 | -------------------------------------------------------------------------------- /.stylish-haskell.yaml: -------------------------------------------------------------------------------- 1 | # stylish-haskell configuration file 2 | # ================================== 3 | 4 | # The stylish-haskell tool is mainly configured by specifying steps. These steps 5 | # are a list, so they have an order, and one specific step may appear more than 6 | # once (if needed). Each file is processed by these steps in the given order. 7 | steps: 8 | # Convert some ASCII sequences to their Unicode equivalents. This is disabled 9 | # by default. 10 | # - unicode_syntax: 11 | # # In order to make this work, we also need to insert the UnicodeSyntax 12 | # # language pragma. If this flag is set to true, we insert it when it's 13 | # # not already present. You may want to disable it if you configure 14 | # # language extensions using some other method than pragmas. Default: 15 | # # true. 16 | # add_language_pragma: true 17 | 18 | # Align the right hand side of some elements. This is quite conservative 19 | # and only applies to statements where each element occupies a single 20 | # line. 
21 | - simple_align: 22 | cases: true 23 | top_level_patterns: true 24 | records: true 25 | 26 | # Import cleanup 27 | - imports: 28 | # There are different ways we can align names and lists. 29 | # 30 | # - global: Align the import names and import list throughout the entire 31 | # file. 32 | # 33 | # - file: Like global, but don't add padding when there are no qualified 34 | # imports in the file. 35 | # 36 | # - group: Only align the imports per group (a group is formed by adjacent 37 | # import lines). 38 | # 39 | # - none: Do not perform any alignment. 40 | # 41 | # Default: global. 42 | align: group 43 | 44 | # Following options affect only import list alignment. 45 | # 46 | # List align has following options: 47 | # 48 | # - after_alias: Import list is aligned with end of import including 49 | # 'as' and 'hiding' keywords. 50 | # 51 | # > import qualified Data.List as List (concat, foldl, foldr, head, 52 | # > init, last, length) 53 | # 54 | # - with_alias: Import list is aligned with start of alias or hiding. 55 | # 56 | # > import qualified Data.List as List (concat, foldl, foldr, head, 57 | # > init, last, length) 58 | # 59 | # - new_line: Import list starts always on new line. 60 | # 61 | # > import qualified Data.List as List 62 | # > (concat, foldl, foldr, head, init, last, length) 63 | # 64 | # Default: after_alias 65 | # list_align: after_alias 66 | 67 | list_align: with_module_name 68 | 69 | pad_module_names: true 70 | 71 | # Long list align style takes effect when import is too long. This is 72 | # determined by 'columns' setting. 73 | # 74 | # - inline: This option will put as much specs on same line as possible. 75 | # 76 | # - new_line: Import list will start on new line. 77 | # 78 | # - new_line_multiline: Import list will start on new line when it's 79 | # short enough to fit to single line. Otherwise it'll be multiline. 80 | # 81 | # - multiline: One line per import list entry. 82 | # Type with constructor list acts like single import. 83 | # 84 | # > import qualified Data.Map as M 85 | # > ( empty 86 | # > , singleton 87 | # > , ... 88 | # > , delete 89 | # > ) 90 | # 91 | # Default: inline 92 | long_list_align: inline 93 | 94 | # Align empty list (importing instances) 95 | # 96 | # Empty list align has following options 97 | # 98 | # - inherit: inherit list_align setting 99 | # 100 | # - right_after: () is right after the module name: 101 | # 102 | # > import Vector.Instances () 103 | # 104 | # Default: inherit 105 | empty_list_align: inherit 106 | 107 | # List padding determines indentation of import list on lines after import. 108 | # This option affects 'long_list_align'. 109 | # 110 | # - <integer>: constant value 111 | # 112 | # - module_name: align under start of module name. 113 | # Useful for 'file' and 'group' align settings. 114 | list_padding: 4 115 | 116 | # Separate lists option affects formatting of import list for type 117 | # or class. The only difference is single space between type and list 118 | # of constructors, selectors and class functions. 119 | # 120 | # - true: There is single space between Foldable type and list of its 121 | # functions. 122 | # 123 | # > import Data.Foldable (Foldable (fold, foldl, foldMap)) 124 | # 125 | # - false: There is no space between Foldable type and list of its 126 | # functions. 
127 | # 128 | # > import Data.Foldable (Foldable(fold, foldl, foldMap)) 129 | # 130 | # Default: true 131 | separate_lists: true 132 | 133 | # Language pragmas 134 | - language_pragmas: 135 | # We can generate different styles of language pragma lists. 136 | # 137 | # - vertical: Vertical-spaced language pragmas, one per line. 138 | # 139 | # - compact: A more compact style. 140 | # 141 | # - compact_line: Similar to compact, but wrap each line with 142 | # `{-#LANGUAGE #-}'. 143 | # 144 | # Default: vertical. 145 | style: vertical 146 | 147 | # Align affects alignment of closing pragma brackets. 148 | # 149 | # - true: Brackets are aligned in same column. 150 | # 151 | # - false: Brackets are not aligned together. There is only one space 152 | # between actual import and closing bracket. 153 | # 154 | # Default: true 155 | align: true 156 | 157 | # stylish-haskell can detect redundancy of some language pragmas. If this 158 | # is set to true, it will remove those redundant pragmas. Default: true. 159 | remove_redundant: true 160 | 161 | # Replace tabs by spaces. This is disabled by default. 162 | # - tabs: 163 | # # Number of spaces to use for each tab. Default: 8, as specified by the 164 | # # Haskell report. 165 | # spaces: 8 166 | 167 | # Remove trailing whitespace 168 | - trailing_whitespace: {} 169 | 170 | # A common setting is the number of columns (parts of) code will be wrapped 171 | # to. Different steps take this into account. Default: 80. 172 | columns: 800 173 | 174 | # By default, line endings are converted according to the OS. You can override 175 | # preferred format here. 176 | # 177 | # - native: Native newline format. CRLF on Windows, LF on other OSes. 178 | # 179 | # - lf: Convert to LF ("\n"). 180 | # 181 | # - crlf: Convert to CRLF ("\r\n"). 182 | # 183 | # Default: native. 184 | newline: native 185 | 186 | # Sometimes, language extensions are specified in a cabal file or from the 187 | # command line instead of using language pragmas in the file. stylish-haskell 188 | # needs to be aware of these, so it can parse the file correctly. 189 | # 190 | # No language extensions are enabled by default. 
191 | # language_extensions: 192 | # - TemplateHaskell 193 | # - QuasiQuotes 194 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "Build", 6 | "type": "shell", 7 | "command": "bash", 8 | "args": ["-lc", "cabal build all --enable-tests && echo 'Done'"], 9 | "group": { 10 | "kind": "build", 11 | "isDefault": true 12 | }, 13 | "problemMatcher": { 14 | "owner": "haskell", 15 | "fileLocation": "relative", 16 | "pattern": [ 17 | { 18 | "regexp": "^(.+?):(\\d+):(\\d+):\\s+(error|warning|info):.*$", 19 | "file": 1, "line": 2, "column": 3, "severity": 4 20 | }, 21 | { 22 | "regexp": "\\s*(.*)$", 23 | "message": 1 24 | } 25 | ] 26 | }, 27 | "presentation": { 28 | "echo": false, 29 | "reveal": "always", 30 | "focus": false, 31 | "panel": "shared", 32 | "showReuseMessage": false, 33 | "clear": true 34 | } 35 | }, 36 | { 37 | "label": "Test", 38 | "type": "shell", 39 | "command": "bash", 40 | "args": ["-lc", "cabal test all --enable-tests && echo 'Done'"], 41 | "group": { 42 | "kind": "test", 43 | "isDefault": true 44 | }, 45 | "problemMatcher": { 46 | "owner": "haskell", 47 | "fileLocation": "relative", 48 | "pattern": [ 49 | { 50 | "regexp": "^(.+?):(\\d+):(\\d+):.*$", 51 | "file": 1, "line": 2, "column": 3 52 | }, 53 | { 54 | "regexp": "\\s*(\\d\\)\\s)?(.*)$", 55 | "message": 2 56 | } 57 | ] 58 | }, 59 | "presentation": { 60 | "echo": false, 61 | "reveal": "always", 62 | "focus": false, 63 | "panel": "shared", 64 | "showReuseMessage": false, 65 | "clear": true 66 | } 67 | } 68 | ] 69 | } 70 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [Unreleased](https://github.com/haskell-works/cabal-cache/tree/HEAD) 4 | 5 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.1.0.0...HEAD) 6 | 7 | **Closed issues:** 8 | 9 | - sync-from-archive does not consider ${archive\_version}/${store\_hash} subfolder if syncing from \(local\) archive directory [\#236](https://github.com/haskell-works/cabal-cache/issues/236) 10 | - Wrong region parsing [\#230](https://github.com/haskell-works/cabal-cache/issues/230) 11 | 12 | **Merged pull requests:** 13 | 14 | - Issue 236 make sync from archive consider store hash folder [\#238](https://github.com/haskell-works/cabal-cache/pull/238) ([newhoggy](https://github.com/newhoggy)) 15 | - CHG: handle NotFound in readFirstAvailableResource to make sure that … [\#237](https://github.com/haskell-works/cabal-cache/pull/237) ([snetramo](https://github.com/snetramo)) 16 | - Update upper bounds [\#234](https://github.com/haskell-works/cabal-cache/pull/234) ([newhoggy](https://github.com/newhoggy)) 17 | 18 | ## [v1.1.0.0](https://github.com/haskell-works/cabal-cache/tree/v1.1.0.0) (2023-08-09) 19 | 20 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.6.1...v1.1.0.0) 21 | 22 | **Merged pull requests:** 23 | 24 | - Support newer ghcs [\#233](https://github.com/haskell-works/cabal-cache/pull/233) ([newhoggy](https://github.com/newhoggy)) 25 | - Upgrade to `amazonka-2` [\#232](https://github.com/haskell-works/cabal-cache/pull/232) ([newhoggy](https://github.com/newhoggy)) 26 | - Upgrade to haskell/actions/setup@v2 [\#229](https://github.com/haskell-works/cabal-cache/pull/229) 
([newhoggy](https://github.com/newhoggy)) 27 | - Upgrade to haskell/actions/setup@v2 [\#228](https://github.com/haskell-works/cabal-cache/pull/228) ([newhoggy](https://github.com/newhoggy)) 28 | - Fix set-output warnings in CI [\#225](https://github.com/haskell-works/cabal-cache/pull/225) ([newhoggy](https://github.com/newhoggy)) 29 | - Ignore packages cli option [\#224](https://github.com/haskell-works/cabal-cache/pull/224) ([newhoggy](https://github.com/newhoggy)) 30 | - Upgrade oops [\#223](https://github.com/haskell-works/cabal-cache/pull/223) ([newhoggy](https://github.com/newhoggy)) 31 | - Split AppError type [\#220](https://github.com/haskell-works/cabal-cache/pull/220) ([newhoggy](https://github.com/newhoggy)) 32 | - Move S3 functions to own module. Drop antiope dependency [\#219](https://github.com/haskell-works/cabal-cache/pull/219) ([newhoggy](https://github.com/newhoggy)) 33 | - New cp command for debugging purposes [\#218](https://github.com/haskell-works/cabal-cache/pull/218) ([newhoggy](https://github.com/newhoggy)) 34 | - Remove Github Actions environment [\#217](https://github.com/haskell-works/cabal-cache/pull/217) ([newhoggy](https://github.com/newhoggy)) 35 | - Break apart app error [\#216](https://github.com/haskell-works/cabal-cache/pull/216) ([newhoggy](https://github.com/newhoggy)) 36 | - Remove project.sh [\#215](https://github.com/haskell-works/cabal-cache/pull/215) ([newhoggy](https://github.com/newhoggy)) 37 | - Remove tasks.json from git tracking [\#214](https://github.com/haskell-works/cabal-cache/pull/214) ([newhoggy](https://github.com/newhoggy)) 38 | - Use oops for error handling [\#213](https://github.com/haskell-works/cabal-cache/pull/213) ([newhoggy](https://github.com/newhoggy)) 39 | - Update copyright [\#212](https://github.com/haskell-works/cabal-cache/pull/212) ([newhoggy](https://github.com/newhoggy)) 40 | - Delete unused code [\#209](https://github.com/haskell-works/cabal-cache/pull/209) ([newhoggy](https://github.com/newhoggy)) 41 | - Tidy up cabal file [\#208](https://github.com/haskell-works/cabal-cache/pull/208) ([newhoggy](https://github.com/newhoggy)) 42 | - Fix sync-to-archive [\#206](https://github.com/haskell-works/cabal-cache/pull/206) ([hasufell](https://github.com/hasufell)) 43 | 44 | ## [v1.0.6.1](https://github.com/haskell-works/cabal-cache/tree/v1.0.6.1) (2023-02-04) 45 | 46 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.6.0...v1.0.6.1) 47 | 48 | ## [v1.0.6.0](https://github.com/haskell-works/cabal-cache/tree/v1.0.6.0) (2023-02-04) 49 | 50 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.5.5...v1.0.6.0) 51 | 52 | **Closed issues:** 53 | 54 | - Collaboration [\#211](https://github.com/haskell-works/cabal-cache/issues/211) 55 | 56 | ## [v1.0.5.5](https://github.com/haskell-works/cabal-cache/tree/v1.0.5.5) (2022-12-30) 57 | 58 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.5.4...v1.0.5.5) 59 | 60 | **Closed issues:** 61 | 62 | - cabal-cache exits with "thread blocked indefinitely" [\#76](https://github.com/haskell-works/cabal-cache/issues/76) 63 | 64 | **Merged pull requests:** 65 | 66 | - Restrictive retry [\#205](https://github.com/haskell-works/cabal-cache/pull/205) ([hasufell](https://github.com/hasufell)) 67 | 68 | ## [v1.0.5.4](https://github.com/haskell-works/cabal-cache/tree/v1.0.5.4) (2022-12-21) 69 | 70 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.5.2...v1.0.5.4) 71 | 72 | ## 
[v1.0.5.2](https://github.com/haskell-works/cabal-cache/tree/v1.0.5.2) (2022-12-21) 73 | 74 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.5.1...v1.0.5.2) 75 | 76 | **Closed issues:** 77 | 78 | - Should cabal-cache retry failed downloads/uploads? [\#191](https://github.com/haskell-works/cabal-cache/issues/191) 79 | - Trying to use root of the S3 bucket results in `/` key. [\#152](https://github.com/haskell-works/cabal-cache/issues/152) 80 | 81 | **Merged pull requests:** 82 | 83 | - Catch log and rethrow exceptions during download. [\#198](https://github.com/haskell-works/cabal-cache/pull/198) ([newhoggy](https://github.com/newhoggy)) 84 | - Try to fix STM bug wrt \#76 [\#196](https://github.com/haskell-works/cabal-cache/pull/196) ([newhoggy](https://github.com/newhoggy)) 85 | - Fix versioned compiler detection on windows [\#195](https://github.com/haskell-works/cabal-cache/pull/195) ([hasufell](https://github.com/hasufell)) 86 | - More robust store dir detection [\#194](https://github.com/haskell-works/cabal-cache/pull/194) ([hasufell](https://github.com/hasufell)) 87 | - Retry s3 upload/download, fixes \#191 [\#193](https://github.com/haskell-works/cabal-cache/pull/193) ([hasufell](https://github.com/hasufell)) 88 | - Bump upper bounds [\#185](https://github.com/haskell-works/cabal-cache/pull/185) ([newhoggy](https://github.com/newhoggy)) 89 | - Fix typos [\#184](https://github.com/haskell-works/cabal-cache/pull/184) ([newhoggy](https://github.com/newhoggy)) 90 | - New CI to sync with Backblaze s3 provider [\#183](https://github.com/haskell-works/cabal-cache/pull/183) ([newhoggy](https://github.com/newhoggy)) 91 | - Remove unused function anchor [\#106](https://github.com/haskell-works/cabal-cache/pull/106) ([newhoggy](https://github.com/newhoggy)) 92 | 93 | ## [v1.0.5.1](https://github.com/haskell-works/cabal-cache/tree/v1.0.5.1) (2022-12-01) 94 | 95 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.5.0...v1.0.5.1) 96 | 97 | **Merged pull requests:** 98 | 99 | - Fix bug with build-path [\#182](https://github.com/haskell-works/cabal-cache/pull/182) ([newhoggy](https://github.com/newhoggy)) 100 | 101 | ## [v1.0.5.0](https://github.com/haskell-works/cabal-cache/tree/v1.0.5.0) (2022-12-01) 102 | 103 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.4.0...v1.0.5.0) 104 | 105 | **Merged pull requests:** 106 | 107 | - New --path CLI option [\#181](https://github.com/haskell-works/cabal-cache/pull/181) ([newhoggy](https://github.com/newhoggy)) 108 | - More concrete types [\#178](https://github.com/haskell-works/cabal-cache/pull/178) ([newhoggy](https://github.com/newhoggy)) 109 | - Replace boolean [\#177](https://github.com/haskell-works/cabal-cache/pull/177) ([newhoggy](https://github.com/newhoggy)) 110 | - Update dependencies [\#175](https://github.com/haskell-works/cabal-cache/pull/175) ([newhoggy](https://github.com/newhoggy)) 111 | - Remove failing tests [\#172](https://github.com/haskell-works/cabal-cache/pull/172) ([newhoggy](https://github.com/newhoggy)) 112 | - Switch to use cabal-cache-s3 [\#170](https://github.com/haskell-works/cabal-cache/pull/170) ([newhoggy](https://github.com/newhoggy)) 113 | - Remove CircleCI support [\#146](https://github.com/haskell-works/cabal-cache/pull/146) ([newhoggy](https://github.com/newhoggy)) 114 | 115 | ## [v1.0.4.0](https://github.com/haskell-works/cabal-cache/tree/v1.0.4.0) (2022-03-15) 116 | 117 | [Full 
Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.3.0...v1.0.4.0) 118 | 119 | **Closed issues:** 120 | 121 | - support multicloud [\#162](https://github.com/haskell-works/cabal-cache/issues/162) 122 | 123 | **Merged pull requests:** 124 | 125 | - Add support for https [\#169](https://github.com/haskell-works/cabal-cache/pull/169) ([newhoggy](https://github.com/newhoggy)) 126 | - Fix syncing from archive with multicloud [\#168](https://github.com/haskell-works/cabal-cache/pull/168) ([newhoggy](https://github.com/newhoggy)) 127 | - Fix syncing from archive with multicloud [\#167](https://github.com/haskell-works/cabal-cache/pull/167) ([hasufell](https://github.com/hasufell)) 128 | - Support multicloud, fixes \#162 [\#165](https://github.com/haskell-works/cabal-cache/pull/165) ([newhoggy](https://github.com/newhoggy)) 129 | - Support multicloud, fixes \#162 [\#163](https://github.com/haskell-works/cabal-cache/pull/163) ([hasufell](https://github.com/hasufell)) 130 | - Upgrade to ghc-8.10.7 and ghc-9.0.1 [\#161](https://github.com/haskell-works/cabal-cache/pull/161) ([newhoggy](https://github.com/newhoggy)) 131 | - Support ghc-8.10.4 [\#160](https://github.com/haskell-works/cabal-cache/pull/160) ([newhoggy](https://github.com/newhoggy)) 132 | - Fix warnings [\#159](https://github.com/haskell-works/cabal-cache/pull/159) ([newhoggy](https://github.com/newhoggy)) 133 | - Unify URI types [\#157](https://github.com/haskell-works/cabal-cache/pull/157) ([newhoggy](https://github.com/newhoggy)) 134 | 135 | ## [v1.0.3.0](https://github.com/haskell-works/cabal-cache/tree/v1.0.3.0) (2021-03-14) 136 | 137 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.2.2...v1.0.3.0) 138 | 139 | **Merged pull requests:** 140 | 141 | - New plan command [\#156](https://github.com/haskell-works/cabal-cache/pull/156) ([newhoggy](https://github.com/newhoggy)) 142 | - Avoid set-env in Github Actions [\#154](https://github.com/haskell-works/cabal-cache/pull/154) ([newhoggy](https://github.com/newhoggy)) 143 | - Publish releases [\#150](https://github.com/haskell-works/cabal-cache/pull/150) ([newhoggy](https://github.com/newhoggy)) 144 | - Fix warnings [\#148](https://github.com/haskell-works/cabal-cache/pull/148) ([newhoggy](https://github.com/newhoggy)) 145 | - Tweak caching [\#145](https://github.com/haskell-works/cabal-cache/pull/145) ([newhoggy](https://github.com/newhoggy)) 146 | - Remove unnecessary dependencies [\#142](https://github.com/haskell-works/cabal-cache/pull/142) ([newhoggy](https://github.com/newhoggy)) 147 | 148 | ## [v1.0.2.2](https://github.com/haskell-works/cabal-cache/tree/v1.0.2.2) (2020-10-25) 149 | 150 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.2.1...v1.0.2.2) 151 | 152 | ## [v1.0.2.1](https://github.com/haskell-works/cabal-cache/tree/v1.0.2.1) (2020-09-29) 153 | 154 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.2.0...v1.0.2.1) 155 | 156 | **Merged pull requests:** 157 | 158 | - Fix slash handling on Windows [\#141](https://github.com/haskell-works/cabal-cache/pull/141) ([newhoggy](https://github.com/newhoggy)) 159 | 160 | ## [v1.0.2.0](https://github.com/haskell-works/cabal-cache/tree/v1.0.2.0) (2020-09-29) 161 | 162 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.9...v1.0.2.0) 163 | 164 | **Merged pull requests:** 165 | 166 | - Fix macos builds [\#138](https://github.com/haskell-works/cabal-cache/pull/138) ([newhoggy](https://github.com/newhoggy)) 167 | - Add 
--build-path [\#137](https://github.com/haskell-works/cabal-cache/pull/137) ([newhoggy](https://github.com/newhoggy)) 168 | - Use cabal cache [\#136](https://github.com/haskell-works/cabal-cache/pull/136) ([newhoggy](https://github.com/newhoggy)) 169 | 170 | ## [v1.0.1.9](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.9) (2020-09-19) 171 | 172 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.8...v1.0.1.9) 173 | 174 | **Closed issues:** 175 | 176 | - Windows Builds [\#129](https://github.com/haskell-works/cabal-cache/issues/129) 177 | 178 | **Merged pull requests:** 179 | 180 | - Upgrade macos executor [\#134](https://github.com/haskell-works/cabal-cache/pull/134) ([newhoggy](https://github.com/newhoggy)) 181 | - Implement GitHub actions [\#133](https://github.com/haskell-works/cabal-cache/pull/133) ([newhoggy](https://github.com/newhoggy)) 182 | - Implement Github Actions [\#132](https://github.com/haskell-works/cabal-cache/pull/132) ([hazelweakly](https://github.com/hazelweakly)) 183 | - Win32 support [\#130](https://github.com/haskell-works/cabal-cache/pull/130) ([newhoggy](https://github.com/newhoggy)) 184 | - Upgrade to haskell-build@4.1.8 [\#128](https://github.com/haskell-works/cabal-cache/pull/128) ([newhoggy](https://github.com/newhoggy)) 185 | - Upgrade to optparse-applicative-0.16 [\#127](https://github.com/haskell-works/cabal-cache/pull/127) ([newhoggy](https://github.com/newhoggy)) 186 | - Fix macos builds [\#125](https://github.com/haskell-works/cabal-cache/pull/125) ([newhoggy](https://github.com/newhoggy)) 187 | - Use official cache http end-point [\#124](https://github.com/haskell-works/cabal-cache/pull/124) ([newhoggy](https://github.com/newhoggy)) 188 | - Upgrade to orb hackage@1.4.2 [\#123](https://github.com/haskell-works/cabal-cache/pull/123) ([newhoggy](https://github.com/newhoggy)) 189 | - Fix build [\#122](https://github.com/haskell-works/cabal-cache/pull/122) ([newhoggy](https://github.com/newhoggy)) 190 | - Fix hlint [\#121](https://github.com/haskell-works/cabal-cache/pull/121) ([newhoggy](https://github.com/newhoggy)) 191 | - Upgrade generic-lens [\#120](https://github.com/haskell-works/cabal-cache/pull/120) ([newhoggy](https://github.com/newhoggy)) 192 | - Disable parallel garbage colllector in CCI tests [\#119](https://github.com/haskell-works/cabal-cache/pull/119) ([newhoggy](https://github.com/newhoggy)) 193 | - Remove unused imports [\#118](https://github.com/haskell-works/cabal-cache/pull/118) ([newhoggy](https://github.com/newhoggy)) 194 | - Upgrade to hackage@1.4.1 [\#117](https://github.com/haskell-works/cabal-cache/pull/117) ([newhoggy](https://github.com/newhoggy)) 195 | - Upgrade to github-release@1.3.3 [\#116](https://github.com/haskell-works/cabal-cache/pull/116) ([newhoggy](https://github.com/newhoggy)) 196 | - Upgrade to haskell-build-4.1.7 [\#115](https://github.com/haskell-works/cabal-cache/pull/115) ([newhoggy](https://github.com/newhoggy)) 197 | 198 | ## [v1.0.1.8](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.8) (2020-03-24) 199 | 200 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.7...v1.0.1.8) 201 | 202 | **Merged pull requests:** 203 | 204 | - Use token [\#113](https://github.com/haskell-works/cabal-cache/pull/113) ([newhoggy](https://github.com/newhoggy)) 205 | - Convert i386 build to use binary cache [\#112](https://github.com/haskell-works/cabal-cache/pull/112) ([newhoggy](https://github.com/newhoggy)) 206 | 207 | ## 
[v1.0.1.7](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.7) (2020-03-24) 208 | 209 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.6...v1.0.1.7) 210 | 211 | ## [v1.0.1.6](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.6) (2020-03-24) 212 | 213 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.5.a...v1.0.1.6) 214 | 215 | **Merged pull requests:** 216 | 217 | - i386 build [\#111](https://github.com/haskell-works/cabal-cache/pull/111) ([newhoggy](https://github.com/newhoggy)) 218 | 219 | ## [v1.0.1.5.a](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.5.a) (2020-01-25) 220 | 221 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.5...v1.0.1.5.a) 222 | 223 | ## [v1.0.1.5](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.5) (2020-01-25) 224 | 225 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.4...v1.0.1.5) 226 | 227 | **Merged pull requests:** 228 | 229 | - Upgrade to haskell-build@4.0.6 [\#105](https://github.com/haskell-works/cabal-cache/pull/105) ([newhoggy](https://github.com/newhoggy)) 230 | - Upgrade to haskell-build@4.0.6 [\#104](https://github.com/haskell-works/cabal-cache/pull/104) ([newhoggy](https://github.com/newhoggy)) 231 | 232 | ## [v1.0.1.4](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.4) (2020-01-25) 233 | 234 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.3...v1.0.1.4) 235 | 236 | **Closed issues:** 237 | 238 | - ghc-pkg invocation doesn't honour current compiler [\#94](https://github.com/haskell-works/cabal-cache/issues/94) 239 | 240 | **Merged pull requests:** 241 | 242 | - Fix macos build [\#103](https://github.com/haskell-works/cabal-cache/pull/103) ([newhoggy](https://github.com/newhoggy)) 243 | - Upgrade to ghc-8.8.2 in CI [\#102](https://github.com/haskell-works/cabal-cache/pull/102) ([newhoggy](https://github.com/newhoggy)) 244 | - Support multiple archive-uris [\#101](https://github.com/haskell-works/cabal-cache/pull/101) ([newhoggy](https://github.com/newhoggy)) 245 | - Jky azure [\#100](https://github.com/haskell-works/cabal-cache/pull/100) ([newhoggy](https://github.com/newhoggy)) 246 | - Fix development build files [\#99](https://github.com/haskell-works/cabal-cache/pull/99) ([newhoggy](https://github.com/newhoggy)) 247 | - Tidy up cabal file [\#98](https://github.com/haskell-works/cabal-cache/pull/98) ([newhoggy](https://github.com/newhoggy)) 248 | 249 | ## [v1.0.1.3](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.3) (2019-12-06) 250 | 251 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.2...v1.0.1.3) 252 | 253 | **Merged pull requests:** 254 | 255 | - Use same ghc-pkg version as specified by GHC version in plan.json [\#97](https://github.com/haskell-works/cabal-cache/pull/97) ([newhoggy](https://github.com/newhoggy)) 256 | - Move command parsers to command modules [\#96](https://github.com/haskell-works/cabal-cache/pull/96) ([newhoggy](https://github.com/newhoggy)) 257 | - Direct test output and generate environment files [\#93](https://github.com/haskell-works/cabal-cache/pull/93) ([newhoggy](https://github.com/newhoggy)) 258 | 259 | ## [v1.0.1.2](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.2) (2019-10-26) 260 | 261 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.1...v1.0.1.2) 262 | 263 | **Closed issues:** 264 | 265 | - Allow use of non-AWS clouds 
[\#83](https://github.com/haskell-works/cabal-cache/issues/83) 266 | 267 | **Merged pull requests:** 268 | 269 | - Additional brew update to avoid ruby syntax issues [\#91](https://github.com/haskell-works/cabal-cache/pull/91) ([newhoggy](https://github.com/newhoggy)) 270 | - Additional brew update to avoid ruby syntax issues [\#90](https://github.com/haskell-works/cabal-cache/pull/90) ([newhoggy](https://github.com/newhoggy)) 271 | - CI for ghc-8.8.1 [\#88](https://github.com/haskell-works/cabal-cache/pull/88) ([newhoggy](https://github.com/newhoggy)) 272 | - Fix for ghc-8.8.1 [\#87](https://github.com/haskell-works/cabal-cache/pull/87) ([newhoggy](https://github.com/newhoggy)) 273 | - Upgrade generic-lens version [\#86](https://github.com/haskell-works/cabal-cache/pull/86) ([newhoggy](https://github.com/newhoggy)) 274 | - Bump upper-bound of optparse-applicative [\#84](https://github.com/haskell-works/cabal-cache/pull/84) ([newhoggy](https://github.com/newhoggy)) 275 | - Upgrade to haskell-build-4.0.2 [\#82](https://github.com/haskell-works/cabal-cache/pull/82) ([newhoggy](https://github.com/newhoggy)) 276 | - Upgrade haskell build orb version [\#81](https://github.com/haskell-works/cabal-cache/pull/81) ([newhoggy](https://github.com/newhoggy)) 277 | - Upgrade haskell-build orb version [\#78](https://github.com/haskell-works/cabal-cache/pull/78) ([newhoggy](https://github.com/newhoggy)) 278 | 279 | ## [v1.0.1.1](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.1) (2019-07-21) 280 | 281 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.1.0...v1.0.1.1) 282 | 283 | **Closed issues:** 284 | 285 | - renamePath errors [\#73](https://github.com/haskell-works/cabal-cache/issues/73) 286 | - Doesn't work [\#72](https://github.com/haskell-works/cabal-cache/issues/72) 287 | 288 | **Merged pull requests:** 289 | 290 | - Fix getLibFiles if directory does not exist wrt \#72 [\#75](https://github.com/haskell-works/cabal-cache/pull/75) ([hasufell](https://github.com/hasufell)) 291 | - Fix renamePath errors wrt \#73 [\#74](https://github.com/haskell-works/cabal-cache/pull/74) ([hasufell](https://github.com/hasufell)) 292 | 293 | ## [v1.0.1.0](https://github.com/haskell-works/cabal-cache/tree/v1.0.1.0) (2019-07-03) 294 | 295 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.12...v1.0.1.0) 296 | 297 | **Merged pull requests:** 298 | 299 | - use ubuntu:16.04 for max compatibility [\#71](https://github.com/haskell-works/cabal-cache/pull/71) ([dsturnbull](https://github.com/dsturnbull)) 300 | 301 | ## [v1.0.0.12](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.12) (2019-05-29) 302 | 303 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.11...v1.0.0.12) 304 | 305 | **Merged pull requests:** 306 | 307 | - Check for target file existence [\#69](https://github.com/haskell-works/cabal-cache/pull/69) ([AlexeyRaga](https://github.com/AlexeyRaga)) 308 | 309 | ## [v1.0.0.11](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.11) (2019-05-29) 310 | 311 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.10...v1.0.0.11) 312 | 313 | **Merged pull requests:** 314 | 315 | - Upgrade archive to v2 [\#68](https://github.com/haskell-works/cabal-cache/pull/68) ([newhoggy](https://github.com/newhoggy)) 316 | - Append metadata instead of prepend [\#67](https://github.com/haskell-works/cabal-cache/pull/67) ([newhoggy](https://github.com/newhoggy)) 317 | - Use relation package instead 
[\#66](https://github.com/haskell-works/cabal-cache/pull/66) ([newhoggy](https://github.com/newhoggy)) 318 | 319 | ## [v1.0.0.10](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.10) (2019-05-12) 320 | 321 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.9...v1.0.0.10) 322 | 323 | **Merged pull requests:** 324 | 325 | - Smaller footprint [\#65](https://github.com/haskell-works/cabal-cache/pull/65) ([AlexeyRaga](https://github.com/AlexeyRaga)) 326 | - Upgrade to haskell-build@2.0.2 [\#64](https://github.com/haskell-works/cabal-cache/pull/64) ([newhoggy](https://github.com/newhoggy)) 327 | - Upgrade to haskell-build@2.0.1 [\#63](https://github.com/haskell-works/cabal-cache/pull/63) ([newhoggy](https://github.com/newhoggy)) 328 | - Build OSX binaries [\#62](https://github.com/haskell-works/cabal-cache/pull/62) ([AlexeyRaga](https://github.com/AlexeyRaga)) 329 | 330 | ## [v1.0.0.9](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.9) (2019-05-12) 331 | 332 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.8...v1.0.0.9) 333 | 334 | **Merged pull requests:** 335 | 336 | - Add missing cases in display app error [\#61](https://github.com/haskell-works/cabal-cache/pull/61) ([newhoggy](https://github.com/newhoggy)) 337 | - Use latest cabal-cache again [\#60](https://github.com/haskell-works/cabal-cache/pull/60) ([newhoggy](https://github.com/newhoggy)) 338 | 339 | ## [v1.0.0.8](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.8) (2019-05-12) 340 | 341 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.7...v1.0.0.8) 342 | 343 | **Merged pull requests:** 344 | 345 | - Do not fail in cleanup [\#59](https://github.com/haskell-works/cabal-cache/pull/59) ([newhoggy](https://github.com/newhoggy)) 346 | - Enable read for binary cache in forks [\#58](https://github.com/haskell-works/cabal-cache/pull/58) ([newhoggy](https://github.com/newhoggy)) 347 | 348 | ## [v1.0.0.7](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.7) (2019-05-11) 349 | 350 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.6...v1.0.0.7) 351 | 352 | **Merged pull requests:** 353 | 354 | - Reduce number of queries in download [\#57](https://github.com/haskell-works/cabal-cache/pull/57) ([newhoggy](https://github.com/newhoggy)) 355 | - Remove duplicate download [\#56](https://github.com/haskell-works/cabal-cache/pull/56) ([newhoggy](https://github.com/newhoggy)) 356 | - New readFirstAvailableResource function [\#55](https://github.com/haskell-works/cabal-cache/pull/55) ([newhoggy](https://github.com/newhoggy)) 357 | - Use Either instead of Maybe during downloads [\#54](https://github.com/haskell-works/cabal-cache/pull/54) ([newhoggy](https://github.com/newhoggy)) 358 | 359 | ## [v1.0.0.6](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.6) (2019-05-11) 360 | 361 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.5...v1.0.0.6) 362 | 363 | **Merged pull requests:** 364 | 365 | - Upgrade to haskell-works/haskell-build@2.0.0 [\#53](https://github.com/haskell-works/cabal-cache/pull/53) ([newhoggy](https://github.com/newhoggy)) 366 | - Upgrade to haskell-build-2@1.6.18 [\#52](https://github.com/haskell-works/cabal-cache/pull/52) ([newhoggy](https://github.com/newhoggy)) 367 | 368 | ## [v1.0.0.5](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.5) (2019-05-10) 369 | 370 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.4...v1.0.0.5) 
371 | 372 | ## [v1.0.0.4](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.4) (2019-05-10) 373 | 374 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.3...v1.0.0.4) 375 | 376 | **Merged pull requests:** 377 | 378 | - HTTP support [\#50](https://github.com/haskell-works/cabal-cache/pull/50) ([newhoggy](https://github.com/newhoggy)) 379 | - Do not fail if no access to bucket [\#49](https://github.com/haskell-works/cabal-cache/pull/49) ([newhoggy](https://github.com/newhoggy)) 380 | 381 | ## [v1.0.0.3](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.3) (2019-05-07) 382 | 383 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.2...v1.0.0.3) 384 | 385 | **Merged pull requests:** 386 | 387 | - Implement safe download [\#47](https://github.com/haskell-works/cabal-cache/pull/47) ([newhoggy](https://github.com/newhoggy)) 388 | 389 | ## [v1.0.0.2](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.2) (2019-05-03) 390 | 391 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.1...v1.0.0.2) 392 | 393 | **Merged pull requests:** 394 | 395 | - AWS logging [\#45](https://github.com/haskell-works/cabal-cache/pull/45) ([newhoggy](https://github.com/newhoggy)) 396 | - Log AWS errors [\#43](https://github.com/haskell-works/cabal-cache/pull/43) ([newhoggy](https://github.com/newhoggy)) 397 | - Download queue [\#42](https://github.com/haskell-works/cabal-cache/pull/42) ([newhoggy](https://github.com/newhoggy)) 398 | - New Relation type [\#41](https://github.com/haskell-works/cabal-cache/pull/41) ([newhoggy](https://github.com/newhoggy)) 399 | - Fix test module names [\#40](https://github.com/haskell-works/cabal-cache/pull/40) ([newhoggy](https://github.com/newhoggy)) 400 | - Add depends field to Package type [\#39](https://github.com/haskell-works/cabal-cache/pull/39) ([newhoggy](https://github.com/newhoggy)) 401 | - Rename modules from Ci.Assist to CabalCache [\#38](https://github.com/haskell-works/cabal-cache/pull/38) ([newhoggy](https://github.com/newhoggy)) 402 | - New stm dependency [\#37](https://github.com/haskell-works/cabal-cache/pull/37) ([newhoggy](https://github.com/newhoggy)) 403 | 404 | ## [v1.0.0.1](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.1) (2019-04-30) 405 | 406 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v1.0.0.0...v1.0.0.1) 407 | 408 | **Merged pull requests:** 409 | 410 | - Retry copy resource [\#36](https://github.com/haskell-works/cabal-cache/pull/36) ([newhoggy](https://github.com/newhoggy)) 411 | 412 | ## [v1.0.0.0](https://github.com/haskell-works/cabal-cache/tree/v1.0.0.0) (2019-04-26) 413 | 414 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.2.0.2...v1.0.0.0) 415 | 416 | **Merged pull requests:** 417 | 418 | - Add metadata [\#34](https://github.com/haskell-works/cabal-cache/pull/34) ([AlexeyRaga](https://github.com/AlexeyRaga)) 419 | - Restore from matching store-path-hash preferentially [\#33](https://github.com/haskell-works/cabal-cache/pull/33) ([newhoggy](https://github.com/newhoggy)) 420 | - Hashed store path in archived packages [\#32](https://github.com/haskell-works/cabal-cache/pull/32) ([newhoggy](https://github.com/newhoggy)) 421 | - DRY versioning handling [\#31](https://github.com/haskell-works/cabal-cache/pull/31) ([newhoggy](https://github.com/newhoggy)) 422 | - Add versioning to archive [\#30](https://github.com/haskell-works/cabal-cache/pull/30) ([newhoggy](https://github.com/newhoggy)) 423 | - 
Upgrade to haskell-build-2@1.6.7 [\#28](https://github.com/haskell-works/cabal-cache/pull/28) ([newhoggy](https://github.com/newhoggy)) 424 | - Typo in readme [\#27](https://github.com/haskell-works/cabal-cache/pull/27) ([ekmett](https://github.com/ekmett)) 425 | 426 | ## [v0.2.0.2](https://github.com/haskell-works/cabal-cache/tree/v0.2.0.2) (2019-04-25) 427 | 428 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.2.0.1...v0.2.0.2) 429 | 430 | ## [v0.2.0.1](https://github.com/haskell-works/cabal-cache/tree/v0.2.0.1) (2019-04-20) 431 | 432 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.2.0.0...v0.2.0.1) 433 | 434 | ## [v0.2.0.0](https://github.com/haskell-works/cabal-cache/tree/v0.2.0.0) (2019-04-20) 435 | 436 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.9...v0.2.0.0) 437 | 438 | **Merged pull requests:** 439 | 440 | - Create archive using tar cli [\#25](https://github.com/haskell-works/cabal-cache/pull/25) ([newhoggy](https://github.com/newhoggy)) 441 | - Introduce new Presence and Tagged datatypes to convey richer informat… [\#21](https://github.com/haskell-works/cabal-cache/pull/21) ([newhoggy](https://github.com/newhoggy)) 442 | 443 | ## [v0.1.0.9](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.9) (2019-04-20) 444 | 445 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.8...v0.1.0.9) 446 | 447 | ## [v0.1.0.8](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.8) (2019-04-19) 448 | 449 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.7...v0.1.0.8) 450 | 451 | ## [v0.1.0.7](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.7) (2019-04-19) 452 | 453 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.5...v0.1.0.7) 454 | 455 | ## [v0.1.0.5](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.5) (2019-04-15) 456 | 457 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.4...v0.1.0.5) 458 | 459 | ## [v0.1.0.4](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.4) (2019-04-15) 460 | 461 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.3...v0.1.0.4) 462 | 463 | **Merged pull requests:** 464 | 465 | - Make all paths platform-independent, some cleanup [\#19](https://github.com/haskell-works/cabal-cache/pull/19) ([AlexeyRaga](https://github.com/AlexeyRaga)) 466 | - Create missing directories [\#18](https://github.com/haskell-works/cabal-cache/pull/18) ([newhoggy](https://github.com/newhoggy)) 467 | - Cleanup paths [\#17](https://github.com/haskell-works/cabal-cache/pull/17) ([newhoggy](https://github.com/newhoggy)) 468 | - More logging [\#16](https://github.com/haskell-works/cabal-cache/pull/16) ([newhoggy](https://github.com/newhoggy)) 469 | 470 | ## [v0.1.0.3](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.3) (2019-04-13) 471 | 472 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.2...v0.1.0.3) 473 | 474 | **Merged pull requests:** 475 | 476 | - Auto create Package DB in sync-from-archive command [\#15](https://github.com/haskell-works/cabal-cache/pull/15) ([newhoggy](https://github.com/newhoggy)) 477 | 478 | ## [v0.1.0.2](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.2) (2019-04-13) 479 | 480 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.1...v0.1.0.2) 481 | 482 | **Merged pull requests:** 483 | 484 | - Auto create Package DB 
[\#14](https://github.com/haskell-works/cabal-cache/pull/14) ([newhoggy](https://github.com/newhoggy)) 485 | 486 | ## [v0.1.0.1](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.1) (2019-04-12) 487 | 488 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/v0.1.0.0...v0.1.0.1) 489 | 490 | **Merged pull requests:** 491 | 492 | - Add region options [\#12](https://github.com/haskell-works/cabal-cache/pull/12) ([newhoggy](https://github.com/newhoggy)) 493 | - Threadsafe logging [\#11](https://github.com/haskell-works/cabal-cache/pull/11) ([newhoggy](https://github.com/newhoggy)) 494 | - Add threads [\#10](https://github.com/haskell-works/cabal-cache/pull/10) ([newhoggy](https://github.com/newhoggy)) 495 | - Recache package db after sync from archive [\#9](https://github.com/haskell-works/cabal-cache/pull/9) ([newhoggy](https://github.com/newhoggy)) 496 | - Archive dylibs [\#8](https://github.com/haskell-works/cabal-cache/pull/8) ([newhoggy](https://github.com/newhoggy)) 497 | - Static link [\#7](https://github.com/haskell-works/cabal-cache/pull/7) ([AlexeyRaga](https://github.com/AlexeyRaga)) 498 | - Cleanup tar entries [\#6](https://github.com/haskell-works/cabal-cache/pull/6) ([AlexeyRaga](https://github.com/AlexeyRaga)) 499 | - Pass more package information [\#5](https://github.com/haskell-works/cabal-cache/pull/5) ([newhoggy](https://github.com/newhoggy)) 500 | - Fix pred [\#4](https://github.com/haskell-works/cabal-cache/pull/4) ([AlexeyRaga](https://github.com/AlexeyRaga)) 501 | - Template conf [\#3](https://github.com/haskell-works/cabal-cache/pull/3) ([AlexeyRaga](https://github.com/AlexeyRaga)) 502 | - Make conf files optional [\#2](https://github.com/haskell-works/cabal-cache/pull/2) ([newhoggy](https://github.com/newhoggy)) 503 | - Include conf [\#1](https://github.com/haskell-works/cabal-cache/pull/1) ([AlexeyRaga](https://github.com/AlexeyRaga)) 504 | 505 | ## [v0.1.0.0](https://github.com/haskell-works/cabal-cache/tree/v0.1.0.0) (2019-04-12) 506 | 507 | [Full Changelog](https://github.com/haskell-works/cabal-cache/compare/faf6083ca5b4d084f1e2e41d22db6a6a33b78a75...v0.1.0.0) 508 | 509 | 510 | 511 | \* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* 512 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019-2023, John Ky 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of John Ky nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # cabal-cache
2 | [![master](https://circleci.com/gh/haskell-works/cabal-cache/tree/master.svg?style=svg)](https://circleci.com/gh/haskell-works/cabal-cache/tree/master)
3 |
4 | Tool for caching built cabal new-build packages.
5 |
6 | The tool is useful in development when you want to share your built Haskell package dependencies
7 | for a particular project with another developer, and also in CI, where caching is useful for reducing
8 | build times.
9 |
10 | `cabal-cache` supports syncing to an archive directory or to an S3 bucket.
11 |
12 | ## Installation
13 |
14 | Several installation methods are available.
15 |
16 | ### From source
17 |
18 | ```bash
19 | cabal new-install cabal-cache
20 | ```
21 |
22 | ### Binaries
23 |
24 | Download binaries from https://github.com/haskell-works/cabal-cache/releases
25 |
26 | ### Using Homebrew on Mac OS X
27 |
28 | ```bash
29 | brew tap haskell-works/homebrew-haskell-works git@github.com:haskell-works/homebrew-haskell-works.git
30 | brew update
31 | brew install cabal-cache
32 | ```
33 |
34 | ## Example usage
35 |
36 | Syncing built packages with S3 requires that you have an S3 bucket with AWS
37 | credentials stored in the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables.
38 | You should also know the AWS region the bucket was created in.
39 |
40 | ### Sync to archive
41 |
42 | Change into your project directory.
43 |
44 | Build the project with `cabal v2-build`. This will ensure your dependencies are built and
45 | will produce a `plan.json` file that is required for the `cabal-cache` tool to know which built
46 | packages to sync up.
47 |
48 | Run the following command to sync to S3.
49 |
50 | ```bash
51 | cabal-cache sync-to-archive --threads 16 --archive-uri s3://my-cabal-cache-bucket/archive --region Sydney
52 | ```
53 |
54 | Run the following command to sync to an archive directory.
55 |
56 | ```bash
57 | cabal-cache sync-to-archive --threads 16 --archive-uri archive --region Sydney
58 | ```
59 |
60 | ### Sync from S3
61 |
62 | Change into your project directory.
63 |
64 | Build the project with `cabal v2-configure`. This will produce a `plan.json` file that is required
65 | for the `cabal-cache` tool to know which built packages to sync down.
66 |
67 | Run the following command to sync from S3.
68 |
69 | ```bash
70 | cabal-cache sync-from-archive --threads 16 --archive-uri s3://my-cabal-cache-bucket/archive --region Sydney
71 | ```
72 |
73 | Run the following command to sync from an archive directory.
74 |
75 | ```bash
76 | cabal-cache sync-from-archive --threads 16 --archive-uri archive --region Sydney
77 | ```
78 |
79 | ### Multicloud
80 |
81 | To run against a different service, use something like:
82 |
83 | ```bash
84 | cabal-cache sync-to-archive --threads 16 --archive-uri s3://my-cabal-cache-bucket/archive --host-name-override=s3.us-west.some-service.com --host-port-override=443 --host-ssl-override=True
85 | ```
86 |
87 | ## The archive
88 |
89 | ### Archive tarball format
90 |
91 | Built packages are stored in tarballs which contain the following files:
92 |
93 | ```bash
94 | x ${compiler_id}/${package_id}/_CC_METADATA/store-path
95 | x ${compiler_id}/lib/libHS${package_id}-*.dylib
96 | x ${compiler_id}/${package_id}
97 | x ${compiler_id}/package.db/${package_id}.conf
98 | ```
99 |
100 | Aside from the files in the `_CC_METADATA` directory, everything else is copied verbatim from the cabal
101 | store at the corresponding location. This includes the `conf` file, which may contain absolute paths
102 | that would cause the built package to be non-relocatable.
103 |
104 | As a work-around, the tarball also includes the `_CC_METADATA/store-path`
105 | file which stores the cabal store path from which the cached package was derived.
106 |
107 | Upon unpacking, `cabal-cache` will rewrite the `conf` file to contain the new store path using the
108 | information stored in the `_CC_METADATA/store-path` file. The `_CC_METADATA` directory and its contents
109 | will also be unpacked, making it easy to recognise packages that have been restored using
110 | `cabal-cache`.
111 |
112 | ### Archive directory structure
113 |
114 | The archive contains files in the following locations:
115 |
116 | ```bash
117 | /Users/jky/moo-archive/${archive_version}/${compiler_id}/${package_id}.tar.gz
118 | /Users/jky/moo-archive/${archive_version}/${store_hash}/${compiler_id}/${package_id}.tar.gz
119 | ```
120 |
121 | Both tarballs are identical. If they both exist then the first may be a symlink to the second
122 | when stored on the filesystem.
123 |
124 | The direct subdirectory of the archive is the `${archive_version}`, for example `v2`. This is the
125 | version of the archive format. This corresponds to the major version of the `cabal-cache` package.
126 |
127 | The next directory may be the `${store_hash}` or the `${compiler_id}`. If it is the `${store_hash}`
128 | then the `${compiler_id}` will be a subdirectory of that.
129 |
130 | The `${store_hash}` is the hash of the store path from which the cached package originally came.
131 |
132 | `cabal-cache` will preferentially restore using this version if it is available and the `${store_hash}`
133 | matches the cabal store path that is being restored to.
134 |
135 | If the package matching the `${store_hash}` cannot be found, `cabal-cache` will fall back to the version
136 | without the `${store_hash}`.
137 |
138 | A version without a `${store_hash}` may not exist. See [Caveats](#caveats) for more information.
139 |
140 | ## Caveats
141 |
142 | ### Packages that use absolute paths to the cabal store
143 |
144 | Packages sometimes do things that cause their built artefacts to contain absolute paths to the cabal
145 | store. This unfortunately makes such built packages non-relocatable.
146 |
147 | It is recommended that you use a fixed cabal store path rather than the default `$HOME/.cabal/store`
148 | to avoid any potential issues.
149 |
150 | See https://github.com/haskell/cabal/issues/4097 for more information.
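For example (a minimal sketch; `/opt/cabal/store` is an arbitrary path chosen for illustration), you can build against an explicit store directory using cabal's global `--store-dir` flag and point `cabal-cache` at the same location with `--store-path`:

```bash
# Build using a fixed store path instead of the default under $HOME, then
# cache from that same path so any embedded absolute paths stay valid on
# every machine that restores with the same --store-path.
cabal --store-dir=/opt/cabal/store v2-build
cabal-cache sync-to-archive --threads 16 --archive-uri s3://my-cabal-cache-bucket/archive --store-path /opt/cabal/store --region Sydney
```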
151 |
152 | Following are examples of how this might happen:
153 |
154 | #### Paths_$pkgname
155 |
156 | `Paths_$pkgname` modules have embedded within them the absolute path to the package in the cabal store,
157 | which means that packages that use some features of this module may not be relocatable, depending on what
158 | they do.
159 |
160 | Packages may query this module to get access to the package's cabal store `share` directory, which
161 | contains data files that the package can read at runtime. Using `cabal-cache` for such packages
162 | could mean that the package will be unable to find such data files.
163 |
164 | To protect against this, `cabal-cache` will by default not sync packages down from the archive
165 | if the package's cabal store `share` directory contains unusual files or directories _unless_ the
166 | `${store_hash}` matches. Currently it considers only the `doc` subdirectory to be usual. More
167 | exceptions may be added later.
168 |
--------------------------------------------------------------------------------
/Setup.hs:
--------------------------------------------------------------------------------
1 | import Distribution.Simple
2 | main = defaultMain
--------------------------------------------------------------------------------
/app/App/Amazonka.hs:
--------------------------------------------------------------------------------
1 | module App.Amazonka
2 |   ( mkAwsEnv
3 |   ) where
4 |
5 | import Effectful
6 | import Effectful.Zoo.Amazonka.Data.AwsEnv
7 | import Effectful.Zoo.Amazonka.Api.Discover
8 | import Effectful.Zoo.Core
9 | import HaskellWorks.Prelude
10 | import HaskellWorks.Tuple
11 |
12 | import qualified Amazonka as AWS
13 | import qualified HaskellWorks.CabalCache.AWS.Env as AWS
14 |
15 | mkAwsEnv :: ()
16 |   => r <: IOE
17 |   => AWS.Region
18 |   -> Maybe (ByteString, Int, Bool)
19 |   -> Maybe AWS.LogLevel
20 |   -> Eff r AwsEnv
21 | mkAwsEnv region mHostEndpoint awsLogLevel =
22 |   liftIO (AWS.mkEnv region (AWS.awsLogger awsLogLevel))
23 |     <&> maybe id (uncurry3 setAwsEnvEndpointOverride) mHostEndpoint
--------------------------------------------------------------------------------
/app/App/Commands.hs:
--------------------------------------------------------------------------------
1 | module App.Commands where
2 |
3 | import App.Commands.Debug (cmdDebug)
4 | import App.Commands.Plan (cmdPlan)
5 | import App.Commands.SyncFromArchive (cmdSyncFromArchive)
6 | import App.Commands.SyncToArchive (cmdSyncToArchive)
7 | import App.Commands.Version (cmdVersion)
8 | import HaskellWorks.Prelude
9 | import Options.Applicative (Parser)
10 |
11 | import qualified Options.Applicative as OA
12 |
13 | {- HLINT ignore "Monoid law, left identity" -}
14 |
15 | commands :: Parser (IO ())
16 | commands = commandsGeneral
17 |
18 | commandsGeneral :: Parser (IO ())
19 | commandsGeneral = OA.subparser $ mempty
20 |   <> OA.commandGroup "Commands:"
21 |   <> cmdPlan
22 |   <> cmdSyncFromArchive
23 |   <> cmdSyncToArchive
24 |   <> cmdVersion
25 |   <> cmdDebug
26 |
--------------------------------------------------------------------------------
/app/App/Commands/Debug.hs:
--------------------------------------------------------------------------------
1 | module App.Commands.Debug
2 |   ( cmdDebug
3 |   ) where
4 |
5 | import App.Commands.Debug.S3 (cmdS3)
6 | import HaskellWorks.Prelude
7 |
8 | import qualified Options.Applicative as OA
9 |
10 | {- HLINT ignore "Monoid law, left identity" -}
11 |
12 | commands :: OA.Parser (IO ())
13 | commands = OA.subparser $ mempty
14 |   <> cmdS3
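-- Editor's note (a sketch, not part of the original source): these
-- `OA.subparser` groups nest, so the copy utility defined below in
-- App.Commands.Debug.S3.Cp is reached as a nested subcommand, e.g.
-- (bucket and key names here are illustrative only):
--
-- > cabal-cache debug s3 cp --src-uri s3://bucket/a.tar.gz --dst-uri s3://bucket/b.tar.gz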
15 | 16 | cmdDebug :: OA.Mod OA.CommandFields (IO ()) 17 | cmdDebug = OA.command "debug" $ flip OA.info OA.idm commands 18 | -------------------------------------------------------------------------------- /app/App/Commands/Debug/S3.hs: -------------------------------------------------------------------------------- 1 | module App.Commands.Debug.S3 2 | ( cmdS3 3 | ) where 4 | 5 | import App.Commands.Debug.S3.Cp (cmdCp) 6 | import HaskellWorks.Prelude 7 | 8 | import qualified Options.Applicative as OA 9 | 10 | {- HLINT ignore "Monoid law, left identity" -} 11 | 12 | commands :: OA.Parser (IO ()) 13 | commands = OA.subparser $ mempty 14 | <> cmdCp 15 | 16 | cmdS3 :: OA.Mod OA.CommandFields (IO ()) 17 | cmdS3 = OA.command "s3" $ flip OA.info OA.idm commands 18 | -------------------------------------------------------------------------------- /app/App/Commands/Debug/S3/Cp.hs: -------------------------------------------------------------------------------- 1 | module App.Commands.Debug.S3.Cp 2 | ( cmdCp, 3 | ) where 4 | 5 | import App.Amazonka 6 | import App.Commands.Options.Parser (text) 7 | import App.Commands.Options.Types (CpOptions (CpOptions)) 8 | import App.Run 9 | import Effectful.Zoo.Amazonka.Data.AwsError 10 | import Effectful.Zoo.Core.Error.Static 11 | import Effectful.Zoo.Lazy.Dynamic 12 | import HaskellWorks.CabalCache.AppError (AwsStatusError(..), displayAwsStatusError) 13 | import HaskellWorks.CabalCache.Error (CopyFailed(..), ExitFailure(..), UnsupportedUri) 14 | import HaskellWorks.Prelude 15 | import Network.URI (parseURI) 16 | 17 | import qualified Amazonka as AWS 18 | import qualified Amazonka.Data as AWS 19 | import qualified App.Commands.Options.Types as Z 20 | import qualified Data.Text as T 21 | import qualified HaskellWorks.CabalCache.AWS.S3 as AWS 22 | import qualified HaskellWorks.CabalCache.IO.Console as CIO 23 | import qualified Options.Applicative as OA 24 | import qualified System.IO as IO 25 | 26 | {- HLINT ignore "Monoid law, left identity" -} 27 | {- HLINT ignore "Reduce duplication" -} 28 | {- HLINT ignore "Redundant do" -} 29 | 30 | runCp :: Z.CpOptions -> IO () 31 | runCp opts = runApp do 32 | let srcUri = opts.srcUri 33 | let dstUri = opts.dstUri 34 | let mHostEndpoint = opts.hostEndpoint 35 | let awsLogLevel = opts.awsLogLevel 36 | 37 | runLazy (mkAwsEnv opts.region mHostEndpoint awsLogLevel) do 38 | AWS.copyS3Uri srcUri dstUri 39 | & do trap @AwsStatusError \e -> do 40 | CIO.hPutStrLn IO.stderr $ "Copy failed: " <> displayAwsStatusError e 41 | throw ExitFailure 42 | & do trap @AwsError \e -> do 43 | CIO.hPutStrLn IO.stderr $ "Copy failed: " <> tshow e 44 | throw ExitFailure 45 | & do trap @CopyFailed \CopyFailed -> do 46 | CIO.hPutStrLn IO.stderr "Copy failed" 47 | throw ExitFailure 48 | & do trap @UnsupportedUri \e -> do 49 | CIO.hPutStrLn IO.stderr $ "Unsupported uri: " <> tshow e 50 | throw ExitFailure 51 | 52 | optsCp :: OA.Parser CpOptions 53 | optsCp = CpOptions 54 | <$> OA.option (OA.auto <|> text) 55 | ( OA.long "region" 56 | <> OA.metavar "AWS_REGION" 57 | <> OA.showDefault 58 | <> OA.value AWS.Oregon 59 | <> OA.help "The AWS region in which to operate" 60 | ) 61 | <*> OA.option (OA.maybeReader parseURI) 62 | ( OA.long "src-uri" 63 | <> OA.help "Source URI to copy from" 64 | <> OA.metavar "S3_URI" 65 | ) 66 | <*> OA.option (OA.maybeReader parseURI) 67 | ( OA.long "dst-uri" 68 | <> OA.help "Destination URI to copy to" 69 | <> OA.metavar "S3_URI" 70 | ) 71 | <*> optional 72 | ( OA.option (OA.eitherReader (AWS.fromText . 
T.pack)) 73 | ( OA.long "aws-log-level" 74 | <> OA.help "AWS Log Level. One of (Error, Info, Debug, Trace)" 75 | <> OA.metavar "AWS_LOG_LEVEL" 76 | ) 77 | ) 78 | <*> optional parseEndpoint 79 | 80 | parseEndpoint :: OA.Parser (ByteString, Int, Bool) 81 | parseEndpoint = 82 | (,,) 83 | <$> OA.option (OA.eitherReader (AWS.fromText . T.pack)) 84 | ( OA.long "host-name-override" 85 | <> OA.help "Override the host name (default: s3.amazonaws.com)" 86 | <> OA.metavar "HOST_NAME" 87 | ) 88 | <*> OA.option OA.auto 89 | ( OA.long "host-port-override" 90 | <> OA.help "Override the host port" 91 | <> OA.metavar "HOST_PORT" 92 | ) 93 | <*> OA.option OA.auto 94 | ( OA.long "host-ssl-override" 95 | <> OA.help "Override the host SSL" 96 | <> OA.metavar "HOST_SSL" 97 | ) 98 | 99 | cmdCp :: OA.Mod OA.CommandFields (IO ()) 100 | cmdCp = OA.command "cp" $ flip OA.info OA.idm $ runCp <$> optsCp 101 | -------------------------------------------------------------------------------- /app/App/Commands/Options/Parser.hs: -------------------------------------------------------------------------------- 1 | module App.Commands.Options.Parser 2 | ( optsVersion, 3 | optsPackageIds, 4 | text, 5 | ) where 6 | 7 | import App.Commands.Options.Types (VersionOptions (..)) 8 | import Data.Set (Set) 9 | import HaskellWorks.Prelude 10 | import Options.Applicative (Parser, ReadM) 11 | 12 | import qualified Amazonka.Data as AWS 13 | import qualified Data.Set as S 14 | import qualified Data.Text as T 15 | import qualified Data.Text as Text 16 | import qualified HaskellWorks.CabalCache.Types as Z 17 | import qualified Options.Applicative as OA 18 | 19 | optsVersion :: Parser VersionOptions 20 | optsVersion = pure VersionOptions 21 | 22 | text :: AWS.FromText a => ReadM a 23 | text = OA.eitherReader (AWS.fromText . Text.pack) 24 | 25 | optsPackageIds :: Parser (Set Z.PackageId) 26 | optsPackageIds = 27 | S.fromList . 
join <$> many
28 |     ( OA.option packageIds
29 |       ( OA.long "ignore-packages"
30 |         <> OA.help "Packages to ignore"
31 |         <> OA.metavar "PACKAGE_LIST"
32 |       )
33 |     )
34 |
35 | packageIds :: ReadM [Text]
36 | packageIds = OA.eitherReader \case
37 |   "" -> pure []
38 |   s -> pure $ T.split (== ',') (T.pack s)
39 |
--------------------------------------------------------------------------------
/app/App/Commands/Options/Types.hs:
--------------------------------------------------------------------------------
1 | module App.Commands.Options.Types
2 |   ( CpOptions(..),
3 |     PlanOptions(..),
4 |     SyncFromArchiveOptions(..),
5 |     SyncToArchiveOptions(..),
6 |     VersionOptions(..),
7 |   ) where
8 |
9 | import Data.Set (Set)
10 | import HaskellWorks.CabalCache.Location (Location)
11 | import HaskellWorks.CabalCache.Types (PackageId)
12 | import HaskellWorks.Prelude
13 | import Network.URI (URI)
14 |
15 | import qualified Amazonka as AWS
16 | import qualified Data.List.NonEmpty as NEL
17 |
18 | data CpOptions = CpOptions
19 |   { region        :: AWS.Region
20 |   , srcUri        :: URI
21 |   , dstUri        :: URI
22 |   , awsLogLevel   :: Maybe AWS.LogLevel
23 |   , hostEndpoint  :: Maybe (ByteString, Int, Bool)
24 |   } deriving (Eq, Show, Generic)
25 |
26 | data SyncToArchiveOptions = SyncToArchiveOptions
27 |   { region         :: AWS.Region
28 |   , archiveUri     :: Location
29 |   , path           :: FilePath
30 |   , buildPath      :: FilePath
31 |   , storePath      :: FilePath
32 |   , storePathHash  :: Maybe String
33 |   , threads        :: Int
34 |   , awsLogLevel    :: Maybe AWS.LogLevel
35 |   , hostEndpoint   :: Maybe (ByteString, Int, Bool)
36 |   , maxRetries     :: Int
37 |   , ignorePackages :: Set PackageId
38 |   } deriving (Eq, Show, Generic)
39 |
40 | data PlanOptions = PlanOptions
41 |   { path          :: FilePath
42 |   , buildPath     :: FilePath
43 |   , storePath     :: FilePath
44 |   , storePathHash :: Maybe String
45 |   , outputFile    :: FilePath
46 |   } deriving (Eq, Show, Generic)
47 |
48 | data SyncFromArchiveOptions = SyncFromArchiveOptions
49 |   { region         :: AWS.Region
50 |   , archiveUris    :: NEL.NonEmpty Location
51 |   , path           :: FilePath
52 |   , buildPath      :: FilePath
53 |   , storePath      :: FilePath
54 |   , storePathHash  :: Maybe String
55 |   , threads        :: Int
56 |   , awsLogLevel    :: Maybe AWS.LogLevel
57 |   , hostEndpoint   :: Maybe (ByteString, Int, Bool)
58 |   , maxRetries     :: Int
59 |   , ignorePackages :: Set PackageId
60 |   } deriving (Eq, Show, Generic)
61 |
62 | data VersionOptions = VersionOptions deriving (Eq, Show, Generic)
63 |
--------------------------------------------------------------------------------
/app/App/Commands/Plan.hs:
--------------------------------------------------------------------------------
1 | module App.Commands.Plan
2 |   ( cmdPlan,
3 |   )
4 | where
5 |
6 | import Amazonka.Data qualified as AWS
7 | import App.Commands.Options.Types (PlanOptions (PlanOptions))
8 | import App.Commands.Options.Types qualified as Z
9 | import App.Run
10 | import App.Static qualified as AS
11 | import Control.Lens (Each (each), (%~))
12 | import Data.Aeson qualified as J
13 | import Data.ByteString.Lazy qualified as LBS
14 | import Data.Text qualified as T
15 | import Effectful.Zoo.Core.Error.Static
16 | import HaskellWorks.CabalCache.Core qualified as Z
17 | import HaskellWorks.CabalCache.Error (DecodeError, ExitFailure (..))
18 | import HaskellWorks.CabalCache.Hash qualified as H
19 | import HaskellWorks.CabalCache.IO.Console qualified as CIO
20 | import HaskellWorks.CabalCache.Location (Location (..), (<.>), (</>))
21 | import HaskellWorks.CabalCache.Version (archiveVersion)
22 | import HaskellWorks.Prelude
23 | import Options.Applicative (CommandFields, Mod, Parser)
24 | import Options.Applicative qualified as OA
25 | import System.IO qualified as IO
26 |
27 | {- HLINT ignore "Monoid law, left identity" -}
28 | {- HLINT ignore "Redundant do" -}
29 | {- HLINT ignore "Reduce duplication" -}
30 |
31 | runPlan :: Z.PlanOptions -> IO ()
32 | runPlan opts = runApp do
33 |   let storePath = opts.storePath
34 |   let archiveUris = [LocalFile ""]
35 |   let storePathHash = opts.storePathHash & fromMaybe (H.hashStorePath storePath)
36 |   let versionedArchiveUris = archiveUris & each %~ (</> archiveVersion)
37 |   let outputFile = opts.outputFile
38 |
39 |   CIO.putStrLn $ "Store path: " <> AWS.toText storePath
40 |   CIO.putStrLn $ "Store path hash: " <> T.pack storePathHash
41 |   CIO.putStrLn $ "Archive URIs: " <> tshow archiveUris
42 |   CIO.putStrLn $ "Archive version: " <> archiveVersion
43 |
44 |   planJson <-
45 |     Z.loadPlan (opts.path </> opts.buildPath)
46 |       & do
47 |         trap @DecodeError \e -> do
48 |           CIO.hPutStrLn IO.stderr $ "ERROR: Unable to parse plan.json file: " <> tshow e
49 |           throw ExitFailure
50 |
51 |   packages <- liftIO $ Z.getPackages storePath planJson
52 |
53 |   plan <- forM packages $ \pInfo -> do
54 |     let archiveFileBasename = pInfo.packageDir <.> ".tar.gz"
55 |     let archiveFiles = versionedArchiveUris <&> (</> T.pack archiveFileBasename)
56 |     let scopedArchiveFiles = versionedArchiveUris <&> (</> T.pack storePathHash </> T.pack archiveFileBasename)
57 |
58 |     return $ archiveFiles <> scopedArchiveFiles
59 |
60 |   if outputFile == "-"
61 |     then liftIO $ LBS.putStr $ J.encode (fmap (fmap AWS.toText) plan)
62 |     else liftIO $ LBS.writeFile outputFile $ J.encode (fmap (fmap AWS.toText) plan)
63 |
64 | optsPlan :: Parser PlanOptions
65 | optsPlan =
66 |   PlanOptions
67 |     <$> OA.strOption
68 |       ( OA.long "path"
69 |         <> OA.help "Path to cabal project. Defaults to \".\""
70 |         <> OA.metavar "DIRECTORY"
71 |         <> OA.value AS.path
72 |       )
73 |     <*> OA.strOption
74 |       ( OA.long "build-path"
75 |         <> OA.help ("Path to cabal build directory. Defaults to " <> show AS.buildPath)
Defaults to " <> show AS.buildPath) 76 | <> OA.metavar "DIRECTORY" 77 | <> OA.value AS.buildPath 78 | ) 79 | <*> OA.strOption 80 | ( OA.long "store-path" 81 | <> OA.help "Path to cabal store" 82 | <> OA.metavar "DIRECTORY" 83 | <> OA.value AS.cabalStoreDirectory 84 | ) 85 | <*> optional 86 | ( OA.strOption 87 | ( OA.long "store-path-hash" 88 | <> OA.help "Store path hash (do not use)" 89 | <> OA.metavar "HASH" 90 | ) 91 | ) 92 | <*> OA.strOption 93 | ( OA.long "output-file" 94 | <> OA.help "Output file" 95 | <> OA.metavar "FILE" 96 | <> OA.value "-" 97 | ) 98 | 99 | cmdPlan :: Mod CommandFields (IO ()) 100 | cmdPlan = OA.command "plan" $ flip OA.info OA.idm $ runPlan <$> optsPlan 101 | -------------------------------------------------------------------------------- /app/App/Commands/SyncFromArchive.hs: -------------------------------------------------------------------------------- 1 | {- HLINT ignore "Redundant id" -} 2 | 3 | module App.Commands.SyncFromArchive 4 | ( cmdSyncFromArchive, 5 | ) where 6 | 7 | import App.Amazonka 8 | import App.Commands.Options.Parser (optsPackageIds, text) 9 | import App.Commands.Options.Types (SyncFromArchiveOptions (SyncFromArchiveOptions)) 10 | import App.Run 11 | import Control.Lens ((^..), (%~), Each(each)) 12 | import Control.Lens.Combinators (traverse1) 13 | import Data.ByteString.Lazy.Search (replace) 14 | import Data.Generics.Product.Any (the) 15 | import Data.List.NonEmpty (NonEmpty) 16 | import Effectful 17 | import Effectful.Zoo.Amazonka.Data.AwsError 18 | import Effectful.Zoo.Core 19 | import Effectful.Zoo.Core.Error.Static 20 | import Effectful.Zoo.Lazy.Dynamic 21 | import HaskellWorks.CabalCache.AppError (AwsStatusError, HttpError (..), displayAwsStatusError, displayHttpError) 22 | import HaskellWorks.CabalCache.Concurrent.Fork 23 | import HaskellWorks.CabalCache.Concurrent.Type 24 | import HaskellWorks.CabalCache.Error (DecodeError(..), ExitFailure(..), InvalidUrl(..), NotFound, UnsupportedUri(..)) 25 | import HaskellWorks.CabalCache.IO.Lazy (readFirstAvailableResource) 26 | import HaskellWorks.CabalCache.IO.Tar (ArchiveError(..)) 27 | import HaskellWorks.CabalCache.Location (toLocation, (<.>), (), Location) 28 | import HaskellWorks.CabalCache.Metadata (loadMetadata) 29 | import HaskellWorks.CabalCache.Version (archiveVersion) 30 | import HaskellWorks.Prelude 31 | import Options.Applicative (CommandFields, Mod, Parser) 32 | import Options.Applicative.NonEmpty (some1) 33 | import System.Directory (createDirectoryIfMissing, doesDirectoryExist) 34 | 35 | import qualified Amazonka as AWS 36 | import qualified Amazonka.Data as AWS 37 | import qualified App.Commands.Options.Types as Z 38 | import qualified App.Static as AS 39 | import qualified Control.Concurrent.STM as STM 40 | import qualified Data.ByteString.Char8 as C8 41 | import qualified Data.ByteString.Lazy as LBS 42 | import qualified Data.List.NonEmpty as NEL 43 | import qualified Data.Map as M 44 | import qualified Data.Map.Strict as Map 45 | import qualified Data.Set as S 46 | import qualified Data.Text as T 47 | import qualified HaskellWorks.CabalCache.Concurrent.DownloadQueue as DQ 48 | import qualified HaskellWorks.CabalCache.Core as Z 49 | import qualified HaskellWorks.CabalCache.Data.List as L 50 | import qualified HaskellWorks.CabalCache.GhcPkg as GhcPkg 51 | import qualified HaskellWorks.CabalCache.Hash as H 52 | import qualified HaskellWorks.CabalCache.IO.Console as CIO 53 | import qualified HaskellWorks.CabalCache.IO.Tar as IO 54 | import qualified HaskellWorks.CabalCache.Store 
55 | import qualified HaskellWorks.CabalCache.Types as Z
56 | import qualified Options.Applicative as OA
57 | import qualified System.Directory as IO
58 | import qualified System.IO as IO
59 | import qualified System.IO.Temp as IO
60 |
61 | {- HLINT ignore "Monoid law, left identity" -}
62 | {- HLINT ignore "Reduce duplication" -}
63 | {- HLINT ignore "Redundant do" -}
64 |
65 | skippable :: Z.Package -> Bool
66 | skippable package = package.packageType == "pre-existing"
67 |
68 | recoverOrVoid :: forall x r. ()
69 |   => Eff (Error x : r) Void
70 |   -> Eff r x
71 | recoverOrVoid f =
72 |   f & fmap absurd
73 |     & trap pure
74 |
75 | runSyncFromArchive :: Z.SyncFromArchiveOptions -> IO ()
76 | runSyncFromArchive opts = runApp do
77 |   let mHostEndpoint = opts.hostEndpoint :: Maybe (ByteString, Int, Bool)
78 |   let storePath = opts.storePath
79 |   let archiveUris = opts.archiveUris :: NonEmpty Location
80 |   let threads = opts.threads
81 |   let awsLogLevel = opts.awsLogLevel
82 |   let versionedArchiveUris = archiveUris & traverse1 %~ (</> archiveVersion) :: NonEmpty Location
83 |   let storePathHash = opts.storePathHash & fromMaybe (H.hashStorePath storePath)
84 |   let scopedArchiveUris = versionedArchiveUris & traverse1 %~ (</> T.pack storePathHash)
85 |   let maxRetries = opts.maxRetries
86 |   let ignorePackages = opts.ignorePackages
87 |
88 |   CIO.putStrLn $ "Store path: " <> AWS.toText storePath
89 |   CIO.putStrLn $ "Store path hash: " <> T.pack storePathHash
90 |   forM_ archiveUris $ \archiveUri -> do
91 |     CIO.putStrLn $ "Archive URI: " <> AWS.toText archiveUri
92 |   CIO.putStrLn $ "Archive version: " <> archiveVersion
93 |   CIO.putStrLn $ "Threads: " <> tshow threads
94 |   CIO.putStrLn $ "AWS Log level: " <> tshow awsLogLevel
95 |
96 |   planJson <- Z.loadPlan (opts.path </> opts.buildPath)
97 |     & do trap @DecodeError \e -> do
98 |           CIO.hPutStrLn IO.stderr $ "ERROR: Unable to parse plan.json file: " <> tshow e
99 |           throw ExitFailure
100 |
101 |   compilerContext <- Z.mkCompilerContext planJson
102 |     & do trap @Text \e -> do
103 |           CIO.hPutStrLn IO.stderr e
104 |           throw ExitFailure
105 |
106 |   liftIO $ GhcPkg.testAvailability compilerContext
107 |
108 |   let compilerId = planJson.compilerId
109 |   let storeCompilerPath = storePath </> T.unpack compilerId
110 |   let storeCompilerPackageDbPath = storeCompilerPath </> "package.db"
111 |   let storeCompilerLibPath = storeCompilerPath </> "lib"
112 |
113 |   CIO.putStrLn "Creating store directories"
114 |   liftIO $ createDirectoryIfMissing True storePath
115 |   liftIO $ createDirectoryIfMissing True storeCompilerPath
116 |   liftIO $ createDirectoryIfMissing True storeCompilerLibPath
117 |
118 |   storeCompilerPackageDbPathExists <- liftIO $ doesDirectoryExist storeCompilerPackageDbPath
119 |
120 |   unless storeCompilerPackageDbPathExists do
121 |     CIO.putStrLn "Package DB missing. Creating Package DB"
122 |     liftIO $ GhcPkg.contextInit compilerContext storeCompilerPackageDbPath
123 |
124 |   packages <- liftIO $ Z.getPackages storePath planJson
125 |
126 |   let installPlan = planJson.installPlan
127 |   let planPackages = M.fromList $ fmap (\p -> (p.id, p)) installPlan
128 |
129 |   let planDeps0 = installPlan >>= \p -> fmap (p.id, ) $ mempty
130 |         <> p.depends
131 |         <> p.exeDepends
132 |         <> (p ^.. the @"components" . each . the @"lib" . each . the @"depends" . each)
133 |         <> (p ^.. the @"components" . each . the @"lib" . each . the @"exeDepends" . each)
134 |   let planDeps = planDeps0 <> fmap (\p -> ("[universe]", p.id)) installPlan
135 |
136 |   downloadQueue <- liftIO $ STM.atomically $ DQ.createDownloadQueue planDeps
137 |
138 |   let pInfos = M.fromList $ fmap (\p -> (p.packageId, p)) packages
139 |
140 |   runLazy (mkAwsEnv opts.region mHostEndpoint awsLogLevel) do
141 |     IO.withSystemTempDirectory "cabal-cache" $ \tempPath -> do
142 |       liftIO $ IO.createDirectoryIfMissing True (tempPath </> T.unpack compilerId </> "package.db")
143 |
144 |       forkThreadsWait threads $ DQ.runQueue downloadQueue $ \packageId -> do
145 |         recoverOrVoid @DQ.DownloadStatus do
146 |           pInfo <- pure (M.lookup packageId pInfos)
147 |             & do onNothingM do
148 |                   CIO.hPutStrLn IO.stderr $ "Warning: Invalid package id: " <> packageId
149 |                   DQ.downloadSucceed
150 |
151 |           let archiveBaseName = pInfo.packageDir <.> ".tar.gz"
152 |           let archiveFiles = versionedArchiveUris & traverse1 %~ (</> T.pack archiveBaseName)
153 |           let scopedArchiveFiles = scopedArchiveUris & traverse1 %~ (</> T.pack archiveBaseName)
154 |           let packageStorePath = storePath </> pInfo.packageDir
155 |           let packageName = pInfo.packageName
156 |
157 |           storeDirectoryExists <- liftIO $ doesDirectoryExist packageStorePath
158 |
159 |           package <- pure (M.lookup packageId planPackages)
160 |             & do onNothingM do
161 |                   CIO.hPutStrLn IO.stderr $ "Warning: package not found: " <> packageName
162 |                   DQ.downloadSucceed
163 |
164 |           when (skippable package) do
165 |             CIO.putStrLn $ "Skipping: " <> packageName
166 |             DQ.downloadSucceed
167 |
168 |           when (packageName `S.member` ignorePackages) do
169 |             CIO.putStrLn $ "Ignoring: " <> packageName
170 |             DQ.downloadFail
171 |
172 |           when storeDirectoryExists DQ.downloadSucceed
173 |
174 |           ensureStorePathCleanup packageStorePath do
175 |             let locations = sconcat $ fmap L.tuple2ToNel (NEL.zip archiveFiles scopedArchiveFiles)
176 |
177 |             (existingArchiveFileContents, existingArchiveFile) <- readFirstAvailableResource locations maxRetries
178 |               & do trap @AwsError \e -> do
179 |                     CIO.putStrLn $ "Unable to download any of: " <> tshow locations <> " because: " <> tshow e
180 |                     DQ.downloadFail
181 |               & do trap @AwsStatusError \e -> do
182 |                     CIO.putStrLn $ "Unable to download any of: " <> tshow locations <> " because: " <> displayAwsStatusError e
183 |                     DQ.downloadFail
184 |               & do trap @HttpError \e -> do
185 |                     CIO.putStrLn $ "Unable to download any of: " <> tshow locations <> " because: " <> displayHttpError e
186 |                     DQ.downloadFail
187 |               & do trap @NotFound \_ -> do
188 |                     CIO.putStrLn $ "Not found: " <> tshow locations
189 |                     DQ.downloadFail
190 |               & do trap @InvalidUrl \(InvalidUrl url' reason') -> do
191 |                     CIO.hPutStrLn IO.stderr $ "Invalid URL: " <> tshow url' <> ", " <> reason'
192 |                     DQ.downloadFail
193 |               & do trap @UnsupportedUri \e -> do
194 |                     CIO.hPutStrLn IO.stderr $ tshow e
195 |                     DQ.downloadFail
196 |
197 |             CIO.putStrLn $ "Extracting: " <> AWS.toText existingArchiveFile
198 |
199 |             let tempArchiveFile = tempPath </> archiveBaseName
200 |             liftIO $ LBS.writeFile tempArchiveFile existingArchiveFileContents
201 |
202 |             IO.extractTar tempArchiveFile storePath
203 |               & do trap @ArchiveError \(ArchiveError reason') -> do
204 |                     CIO.putStrLn $ "Unable to extract tar at " <> tshow tempArchiveFile <> " because: " <> reason'
205 |                     DQ.downloadFail
206 |
207 |             meta <- loadMetadata packageStorePath
208 |             oldStorePath <- pure (Map.lookup "store-path" meta)
209 |               & do onNothingM do
210 |                     CIO.putStrLn "store-path is missing from Metadata"
211 |                     DQ.downloadFail
212 |
213 |             let Z.Tagged conf _ = pInfo.confPath
214 |
215 |             let theConfPath = storePath </> conf
216 |             let tempConfPath = tempPath </> conf
217 |             confPathExists <- liftIO $ IO.doesFileExist theConfPath
218 |             when confPathExists do
219 |               confContents <- liftIO $ LBS.readFile theConfPath
220 |               liftIO $ LBS.writeFile tempConfPath (replace (LBS.toStrict oldStorePath) (C8.pack storePath) confContents)
221 |               liftIO $ IO.copyFile tempConfPath theConfPath >> IO.removeFile tempConfPath
222 |
223 |             DQ.downloadSucceed
224 |
225 |       CIO.putStrLn "Recaching package database"
226 |
227 |       liftIO $ GhcPkg.recache compilerContext storeCompilerPackageDbPath
228 |
229 |       failures <- liftIO $ STM.atomically $ STM.readTVar downloadQueue.tFailures
230 |
231 |       forM_ failures $ \packageId -> CIO.hPutStrLn IO.stderr $ "Failed to download: " <> packageId
232 |
233 | ensureStorePathCleanup :: ()
234 |   => r <: Error DQ.DownloadStatus
235 |   => r <: IOE
236 |   => FilePath
237 |   -> Eff r a
238 |   -> Eff r a
239 | ensureStorePathCleanup packageStorePath =
240 |   trapIn @DQ.DownloadStatus \downloadStatus -> do
241 |     case downloadStatus of
242 |       DQ.DownloadFailure -> M.cleanupStorePath packageStorePath -- remove any partially restored files
243 |       DQ.DownloadSuccess ->
244 |         CIO.hPutStrLn IO.stdout $ "Successfully processed store path: " <> tshow packageStorePath
245 |     throw downloadStatus
246 |
247 | optsSyncFromArchive :: Parser SyncFromArchiveOptions
248 | optsSyncFromArchive = SyncFromArchiveOptions
249 |   <$> OA.option (OA.auto <|> text)
250 |     ( OA.long "region"
251 |       <> OA.metavar "AWS_REGION"
252 |       <> OA.showDefault
253 |       <> OA.value AWS.Oregon
254 |       <> OA.help "The AWS region in which to operate"
255 |     )
256 |   <*> some1
257 |     ( OA.option (OA.maybeReader (toLocation . T.pack))
258 |       ( OA.long "archive-uri"
259 |         <> OA.help "Archive URI to sync from"
260 |         <> OA.metavar "S3_URI"
261 |       )
262 |     )
263 |   <*> OA.strOption
264 |     ( OA.long "path"
265 |       <> OA.help "Path to cabal project directory. Defaults to \".\""
266 |       <> OA.metavar "DIRECTORY"
267 |       <> OA.value AS.path
268 |     )
269 |   <*> OA.strOption
270 |     ( OA.long "build-path"
271 |       <> OA.help ("Path to cabal build directory. Defaults to " <> show AS.buildPath)
272 |       <> OA.metavar "DIRECTORY"
273 |       <> OA.value AS.buildPath
274 |     )
275 |   <*> OA.strOption
276 |     ( OA.long "store-path"
277 |       <> OA.help ("Path to cabal store. Defaults to " <> show AS.cabalStoreDirectory)
278 |       <> OA.metavar "DIRECTORY"
279 |       <> OA.value AS.cabalStoreDirectory
280 |     )
281 |   <*> optional
282 |     ( OA.strOption
283 |       ( OA.long "store-path-hash"
284 |         <> OA.help "Store path hash (do not use)"
285 |         <> OA.metavar "HASH"
286 |       )
287 |     )
288 |   <*> OA.option OA.auto
289 |     ( OA.long "threads"
290 |       <> OA.help "Number of concurrent threads"
291 |       <> OA.metavar "NUM_THREADS"
292 |       <> OA.value 4
293 |     )
294 |   <*> optional
295 |     ( OA.option (OA.eitherReader (AWS.fromText . T.pack))
296 |       ( OA.long "aws-log-level"
297 |         <> OA.help "AWS Log Level. One of (Error, Info, Debug, Trace)"
298 |         <> OA.metavar "AWS_LOG_LEVEL"
299 |       )
300 |     )
301 |   <*> optional parseEndpoint
302 |   <*> OA.option OA.auto
303 |     ( OA.long "max-retries"
304 |       <> OA.help "Max retries for S3 requests"
305 |       <> OA.metavar "NUM_RETRIES"
306 |       <> OA.value 3
307 |     )
308 |   <*> optsPackageIds
309 |
310 | parseEndpoint :: Parser (ByteString, Int, Bool)
311 | parseEndpoint =
312 |   (,,)
313 |     <$> OA.option (OA.eitherReader (AWS.fromText . T.pack))
314 |       ( OA.long "host-name-override"
315 |         <> OA.help "Override the host name (default: s3.amazonaws.com)"
316 |         <> OA.metavar "HOST_NAME"
317 |       )
318 |     <*> OA.option OA.auto
319 |       ( OA.long "host-port-override"
320 |         <> OA.help "Override the host port"
321 |         <> OA.metavar "HOST_PORT"
322 |       )
323 |     <*> OA.option OA.auto
324 |       ( OA.long "host-ssl-override"
325 |         <> OA.help "Override the host SSL"
326 |         <> OA.metavar "HOST_SSL"
327 |       )
328 |
329 | cmdSyncFromArchive :: Mod CommandFields (IO ())
330 | cmdSyncFromArchive = OA.command "sync-from-archive" $ flip OA.info OA.idm $ runSyncFromArchive <$> optsSyncFromArchive
331 |
--------------------------------------------------------------------------------
/app/App/Commands/SyncToArchive.hs:
--------------------------------------------------------------------------------
1 | {- HLINT ignore "Functor law" -}
2 |
3 | module App.Commands.SyncToArchive
4 |   ( cmdSyncToArchive,
5 |   ) where
6 |
7 | import App.Amazonka
8 | import App.Commands.Options.Parser (optsPackageIds, text)
9 | import App.Commands.Options.Types (SyncToArchiveOptions (SyncToArchiveOptions))
10 | import App.Run
11 | import Control.Lens ((^..), Each(each))
12 | import Data.Generics.Product.Any (the)
13 | import Data.List ((\\))
14 | import Effectful
15 | import Effectful.Concurrent.Async
16 | import Effectful.Concurrent.STM
17 | import Effectful.Zoo.Amazonka.Data.AwsError
18 | import Effectful.Zoo.Core
19 | import Effectful.Zoo.Core.Error.Static
20 | import Effectful.Zoo.Lazy.Dynamic
21 | import HaskellWorks.CabalCache.AppError (AwsStatusError, HttpError (..), displayAwsStatusError, displayHttpError)
22 | import HaskellWorks.CabalCache.Error (DecodeError, ExitFailure(..), InvalidUrl(..), NotImplemented(..), UnsupportedUri(..))
23 | import HaskellWorks.CabalCache.IO.Tar (ArchiveError)
24 | import HaskellWorks.CabalCache.Location (Location (..), toLocation, (<.>), (</>))
25 | import HaskellWorks.CabalCache.Metadata (createMetadata)
26 | import HaskellWorks.CabalCache.Topology (buildPlanData, canShare)
27 | import HaskellWorks.CabalCache.Types
28 | import HaskellWorks.CabalCache.Version (archiveVersion)
29 | import HaskellWorks.Prelude
30 | import Options.Applicative (Parser, Mod, CommandFields)
31 | import System.Directory (doesDirectoryExist)
32 | import System.FilePath (takeDirectory)
33 |
34 | import qualified Amazonka as AWS
35 | import qualified Amazonka.Data as AWS
36 | import qualified App.Commands.Options.Types as Z
37 | import qualified App.Static as AS
38 | import qualified Control.Concurrent.STM as STM
39 | import qualified Data.ByteString.Lazy as LBS
40 | import qualified Data.ByteString.Lazy.Char8 as LC8
41 | import qualified Data.Set as S
42 | import qualified Data.Text as T
43 | import qualified Data.Text as Text
44 | import qualified HaskellWorks.CabalCache.Core as Z
45 | import qualified HaskellWorks.CabalCache.GhcPkg as GhcPkg
46 | import qualified HaskellWorks.CabalCache.Hash as H
47 | import qualified HaskellWorks.CabalCache.IO.Console as CIO
48 | import qualified HaskellWorks.CabalCache.IO.File as IO
49 | import qualified HaskellWorks.CabalCache.IO.Lazy as IO
50 | import qualified HaskellWorks.CabalCache.IO.Tar as IO
51 | import qualified Options.Applicative as OA
52 | import qualified System.Directory as IO
53 | import qualified System.IO as IO
54 | import qualified System.IO.Temp as IO
55 |
56 | {- HLINT ignore "Monoid law, left identity" -}
57 | {- HLINT ignore "Redundant do" -}
58 | {- HLINT ignore "Reduce duplication" -}
59 |
60 | data WorkResult = WorkSkipped | WorkFatal
61 |   deriving (Eq, Show)
62 |
63 | runSyncToArchive :: Z.SyncToArchiveOptions -> IO ()
64 | runSyncToArchive opts = runApp do
65 |   tEarlyExit <- newTVarIO False
66 |
67 |   let mHostEndpoint = opts.hostEndpoint
68 |   let storePath = opts.storePath
69 |   let archiveUri = opts.archiveUri
70 |   let threads = opts.threads
71 |   let awsLogLevel = opts.awsLogLevel
72 |   let versionedArchiveUri = archiveUri </> archiveVersion
73 |   let storePathHash = opts.storePathHash & fromMaybe (H.hashStorePath storePath)
74 |   let scopedArchiveUri = versionedArchiveUri </> T.pack storePathHash
75 |   let maxRetries = opts.maxRetries
76 |   let ignorePackages = opts.ignorePackages
77 |
78 |   CIO.putStrLn $ "Store path: " <> AWS.toText storePath
79 |   CIO.putStrLn $ "Store path hash: " <> T.pack storePathHash
80 |   CIO.putStrLn $ "Archive URI: " <> AWS.toText archiveUri
81 |   CIO.putStrLn $ "Archive version: " <> archiveVersion
82 |   CIO.putStrLn $ "Threads: " <> tshow threads
83 |   CIO.putStrLn $ "AWS Log level: " <> tshow awsLogLevel
84 |
85 |   planJson <- Z.loadPlan (opts.path </> opts.buildPath)
86 |     & do trap @DecodeError \e -> do
87 |           CIO.hPutStrLn IO.stderr $ "ERROR: Unable to parse plan.json file: " <> tshow e
88 |           throw ExitFailure
89 |
90 |   compilerContext <- Z.mkCompilerContext planJson
91 |     & do trap @Text \e -> do
92 |           CIO.hPutStrLn IO.stderr e
93 |           throw ExitFailure
94 |
95 |   let compilerId = planJson.compilerId
96 |
97 |   let archivePath = versionedArchiveUri </> compilerId
98 |   let scopedArchivePath = scopedArchiveUri </> compilerId
99 |
100 |   liftIO $ IO.createLocalDirectoryIfMissing archivePath
101 |   liftIO $ IO.createLocalDirectoryIfMissing scopedArchivePath
102 |
103 |   packages <- liftIO $ Z.getPackages storePath planJson
104 |
105 |   nonShareable <- packages & filterM (fmap not . isShareable storePath)
106 |
107 |   let planData = buildPlanData planJson (nonShareable ^.. each . the @"packageId")
the @"packageId") 108 | 109 | let storeCompilerPath = storePath T.unpack compilerId 110 | let storeCompilerPackageDbPath = storeCompilerPath "package.db" 111 | 112 | storeCompilerPackageDbPathExists <- liftIO $ doesDirectoryExist storeCompilerPackageDbPath 113 | 114 | unless storeCompilerPackageDbPathExists $ 115 | liftIO $ GhcPkg.contextInit compilerContext storeCompilerPackageDbPath 116 | 117 | CIO.putStrLn $ "Syncing " <> tshow (length packages) <> " packages" 118 | 119 | runLazy (mkAwsEnv opts.region mHostEndpoint awsLogLevel) do 120 | IO.withSystemTempDirectory "cabal-cache" $ \tempPath -> do 121 | CIO.putStrLn $ "Temp path: " <> tshow tempPath 122 | 123 | pooledForConcurrentlyN_ opts.threads packages $ \pInfo -> do 124 | workLoop tEarlyExit do 125 | let archiveFileBasename = pInfo.packageDir <.> ".tar.gz" 126 | let archiveFile = versionedArchiveUri T.pack archiveFileBasename 127 | let scopedArchiveFile = versionedArchiveUri T.pack storePathHash T.pack archiveFileBasename 128 | let packageStorePath = storePath pInfo.packageDir 129 | let packageName = pInfo.packageName 130 | 131 | when (packageName `S.member` ignorePackages) do 132 | CIO.hPutStrLn IO.stderr $ "Ignoring package: " <> packageName 133 | throw WorkSkipped 134 | 135 | -- either write "normal" package, or a user-specific one if the package cannot be shared 136 | let targetFile = if canShare planData pInfo.packageId then archiveFile else scopedArchiveFile 137 | 138 | archiveFileExists <- IO.resourceExists targetFile 139 | & do trap @InvalidUrl \(InvalidUrl url' reason') -> do 140 | CIO.hPutStrLn IO.stderr $ "Invalid URL: " <> tshow url' <> ", " <> reason' 141 | throw WorkSkipped 142 | & do trap @UnsupportedUri \e -> do 143 | CIO.hPutStrLn IO.stderr $ "Unsupported URI: " <> tshow e 144 | throw WorkSkipped 145 | & do trap @AwsError \e -> do 146 | CIO.hPutStrLn IO.stderr $ "Unsupported URI: " <> tshow e 147 | throw WorkSkipped 148 | & do trap @AwsStatusError \e -> do 149 | CIO.hPutStrLn IO.stderr $ "Unsupported URI: " <> tshow e 150 | throw WorkSkipped 151 | 152 | unless archiveFileExists do 153 | packageStorePathExists <- liftIO $ doesDirectoryExist packageStorePath 154 | 155 | when packageStorePathExists do 156 | let workingStorePackagePath = tempPath pInfo.packageDir 157 | liftIO $ IO.createDirectoryIfMissing True workingStorePackagePath 158 | 159 | let rp2 = Z.relativePaths storePath pInfo 160 | 161 | CIO.putStrLn $ "Creating " <> AWS.toText targetFile 162 | 163 | let tempArchiveFile = tempPath archiveFileBasename 164 | 165 | metas <- createMetadata tempPath pInfo [("store-path", LC8.pack storePath)] 166 | 167 | IO.createTar tempArchiveFile (rp2 <> [metas]) 168 | & do trap @ArchiveError \_ -> do 169 | CIO.hPutStrLn IO.stderr $ "Unable tar " <> tshow tempArchiveFile 170 | throw WorkSkipped 171 | 172 | (liftIO (LBS.readFile tempArchiveFile) >>= IO.writeResource targetFile maxRetries) 173 | & do trap @AwsError \e -> do 174 | CIO.hPutStrLn IO.stderr $ mempty 175 | <> "ERROR: No write access to archive uris: " 176 | <> tshow (fmap AWS.toText [scopedArchiveFile, archiveFile]) 177 | <> " " <> tshow e 178 | throw WorkFatal 179 | & do trap @AwsStatusError \e -> do 180 | CIO.hPutStrLn IO.stderr $ mempty 181 | <> "ERROR: No write access to archive uris: " 182 | <> tshow (fmap AWS.toText [scopedArchiveFile, archiveFile]) 183 | <> " " <> displayAwsStatusError e 184 | throw WorkFatal 185 | & do trap @HttpError \e -> do 186 | CIO.hPutStrLn IO.stderr $ mempty 187 | <> "ERROR: No write access to archive uris: " 188 | <> tshow (fmap 
189 |                         <> " " <> displayHttpError e
190 |                       throw WorkFatal
191 |                 & do trap @NotImplemented \e -> do
192 |                       CIO.hPutStrLn IO.stderr $ mempty
193 |                         <> "Operation not implemented: "
194 |                         <> tshow (fmap AWS.toText [scopedArchiveFile, archiveFile])
195 |                         <> " " <> tshow e
196 |                       throw WorkFatal
197 |                 & do trap @UnsupportedUri \e -> do
198 |                       CIO.hPutStrLn IO.stderr $ mempty
199 |                         <> "Unsupported URI: "
200 |                         <> tshow (fmap AWS.toText [scopedArchiveFile, archiveFile])
201 |                         <> ": " <> tshow e
202 |                       throw WorkFatal
203 |
204 |   earlyExit <- readTVarIO tEarlyExit
205 |
206 |   when earlyExit $ CIO.hPutStrLn IO.stderr "Early exit due to error"
207 |
208 | workLoop :: ()
209 |   => r <: Concurrent
210 |   => TVar Bool
211 |   -> Eff (Error WorkResult : r) ()
212 |   -> Eff r ()
213 | workLoop tEarlyExit f = do
214 |   earlyExit <- readTVarIO tEarlyExit
215 |
216 |   unless earlyExit do
217 |     f & trap @WorkResult \case
218 |       WorkSkipped -> pure ()
219 |       WorkFatal -> atomically $ STM.writeTVar tEarlyExit True
220 |
221 | isShareable :: MonadIO m => FilePath -> Z.PackageInfo -> m Bool
222 | isShareable storePath pkg =
223 |   let packageSharePath = storePath </> pkg.packageDir </> "share"
224 |   in IO.listMaybeDirectory packageSharePath <&> (\\ ["doc"]) <&> null
225 |
226 | optsSyncToArchive :: Parser SyncToArchiveOptions
227 | optsSyncToArchive = SyncToArchiveOptions
228 |   <$> OA.option (OA.auto <|> text)
229 |     ( OA.long "region"
230 |       <> OA.metavar "AWS_REGION"
231 |       <> OA.showDefault
232 |       <> OA.value AWS.Oregon
233 |       <> OA.help "The AWS region in which to operate"
234 |     )
235 |   <*> OA.option (OA.maybeReader (toLocation . Text.pack))
236 |     ( OA.long "archive-uri"
237 |       <> OA.help "Archive URI to sync to"
238 |       <> OA.metavar "S3_URI"
239 |       <> OA.value (LocalFile $ takeDirectory AS.cabalStoreDirectory </> "archive")
240 |     )
241 |   <*> OA.strOption
242 |     ( OA.long "path"
243 |       <> OA.help "Path to cabal project directory. Defaults to \".\""
244 |       <> OA.metavar "DIRECTORY"
245 |       <> OA.value AS.path
246 |     )
247 |   <*> OA.strOption
248 |     ( OA.long "build-path"
249 |       <> OA.help ("Path to cabal build directory. Defaults to " <> show AS.buildPath)
250 |       <> OA.metavar "DIRECTORY"
251 |       <> OA.value AS.buildPath
252 |     )
253 |   <*> OA.strOption
254 |     ( OA.long "store-path"
255 |       <> OA.help "Path to cabal store"
256 |       <> OA.metavar "DIRECTORY"
257 |       <> OA.value AS.cabalStoreDirectory
258 |     )
259 |   <*> optional
260 |     ( OA.strOption
261 |       ( OA.long "store-path-hash"
262 |         <> OA.help "Store path hash (do not use)"
263 |         <> OA.metavar "HASH"
264 |       )
265 |     )
266 |   <*> OA.option OA.auto
267 |     ( OA.long "threads"
268 |       <> OA.help "Number of concurrent threads"
269 |       <> OA.metavar "NUM_THREADS"
270 |       <> OA.value 4
271 |     )
272 |   <*> optional
273 |     ( OA.option (OA.eitherReader (AWS.fromText . T.pack))
274 |       ( OA.long "aws-log-level"
275 |         <> OA.help "AWS Log Level. One of (Error, Info, Debug, Trace)"
276 |         <> OA.metavar "AWS_LOG_LEVEL"
277 |       )
278 |     )
279 |   <*> optional parseEndpoint
280 |   <*> OA.option OA.auto
281 |     ( OA.long "max-retries"
282 |       <> OA.help "Max retries for S3 requests"
283 |       <> OA.metavar "NUM_RETRIES"
284 |       <> OA.value 3
285 |     )
286 |   <*> optsPackageIds
287 |
288 | parseEndpoint :: Parser (ByteString, Int, Bool)
289 | parseEndpoint =
290 |   (,,)
291 |     <$> OA.option (OA.eitherReader (AWS.fromText . 
T.pack)) 292 | ( OA.long "host-name-override" 293 | <> OA.help "Override the host name (default: s3.amazonaws.com)" 294 | <> OA.metavar "HOST_NAME" 295 | ) 296 | <*> OA.option OA.auto 297 | ( OA.long "host-port-override" 298 | <> OA.help "Override the host port" 299 | <> OA.metavar "HOST_PORT" 300 | ) 301 | <*> OA.option OA.auto 302 | ( OA.long "host-ssl-override" 303 | <> OA.help "Override the host SSL" 304 | <> OA.metavar "HOST_SSL" 305 | ) 306 | 307 | cmdSyncToArchive :: Mod CommandFields (IO ()) 308 | cmdSyncToArchive = OA.command "sync-to-archive" $ flip OA.info OA.idm $ runSyncToArchive <$> optsSyncToArchive 309 | -------------------------------------------------------------------------------- /app/App/Commands/Version.hs: -------------------------------------------------------------------------------- 1 | module App.Commands.Version 2 | ( cmdVersion, 3 | ) where 4 | 5 | import App.Commands.Options.Parser (optsVersion) 6 | import HaskellWorks.Prelude 7 | import Options.Applicative (Mod, CommandFields) 8 | 9 | import qualified App.Commands.Options.Types as Z 10 | import qualified Data.Text as T 11 | import qualified Data.Version as V 12 | import qualified HaskellWorks.CabalCache.IO.Console as CIO 13 | import qualified Paths_cabal_cache as P 14 | import qualified Options.Applicative as OA 15 | import qualified Data.List as L 16 | 17 | {- HLINT ignore "Redundant do" -} 18 | {- HLINT ignore "Reduce duplication" -} 19 | 20 | runVersion :: Z.VersionOptions -> IO () 21 | runVersion _ = do 22 | let V.Version {..} = P.version 23 | let version = L.intercalate "." $ fmap show versionBranch 24 | 25 | CIO.putStrLn $ "cabal-cache " <> T.pack version 26 | 27 | cmdVersion :: Mod CommandFields (IO ()) 28 | cmdVersion = OA.command "version" $ flip OA.info OA.idm $ runVersion <$> optsVersion 29 | -------------------------------------------------------------------------------- /app/App/Run.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ImpredicativeTypes #-} 2 | 3 | module App.Run 4 | ( runApp, 5 | ) where 6 | 7 | import HaskellWorks.CabalCache.Error 8 | import HaskellWorks.CabalCache.Exit 9 | import Effectful 10 | import Effectful.Concurrent 11 | import Effectful.Environment 12 | import Effectful.Resource 13 | import Effectful.Zoo.Amazonka.Api.Run 14 | import Effectful.Zoo.Amazonka.Data.AwsLogEntry 15 | import Effectful.Zoo.Core 16 | import Effectful.Zoo.Core.Error.Static 17 | import Effectful.Zoo.DataLog.Data.LogEntry 18 | import Effectful.Zoo.DataLog.Dynamic 19 | import Effectful.Zoo.Log.Data.LogMessage 20 | import Effectful.Zoo.Log.Data.Severity 21 | import Effectful.Zoo.Log.Dynamic 22 | import HaskellWorks.Prelude 23 | 24 | import qualified Data.Text as T 25 | import qualified System.IO as IO 26 | 27 | {- HLINT ignore "Monoid law, left identity" -} 28 | {- HLINT ignore "Reduce duplication" -} 29 | {- HLINT ignore "Redundant do" -} 30 | 31 | writeLog :: () 32 | => r <: IOE 33 | => CallStack 34 | -> Severity 35 | -> Text 36 | -> Eff r () 37 | writeLog _ _ t = do 38 | liftIO $ IO.hPutStrLn IO.stderr $ T.unpack t -- TODO write severity 39 | 40 | runApp :: () 41 | => Eff 42 | ( Error ExitFailure 43 | : DataLog AwsLogEntry 44 | : DataLog (LogEntry (LogMessage Text)) 45 | : Log Text 46 | : Environment 47 | : Concurrent 48 | : Resource 49 | : IOE 50 | : '[] 51 | ) a 52 | -> IO a 53 | runApp f = 54 | f 55 | & catchAndExitFailure @ExitFailure 56 | & runDataLogAwsLogEntryToLog 57 | & runDataLog @(LogEntry (LogMessage Text)) (\_ -> pure ()) -- TODO log 
these properly
58 |     & runLog (ConcUnlift Persistent Unlimited) writeLog
59 |     & runEnvironment
60 |     & runConcurrent
61 |     & runResource
62 |     & runEff
--------------------------------------------------------------------------------
/app/App/Static.hs:
--------------------------------------------------------------------------------
1 | module App.Static
2 |   ( cabalStoreDirectory,
3 |     buildPath,
4 |     path,
5 |   ) where
6 |
7 | import Control.Monad.Catch (handle)
8 | import HaskellWorks.Prelude
9 |
10 | import qualified App.Static.Base as S
11 | import qualified App.Static.Posix as P
12 | import qualified App.Static.Windows as W
13 | import qualified Cabal.Config as CC
14 | import qualified System.IO.Unsafe as IO
15 |
16 |
17 | {-# NOINLINE cabalStoreDirectory #-}
18 | cabalStoreDirectory :: FilePath
19 | cabalStoreDirectory = IO.unsafePerformIO $ handle' $
20 |   runIdentity . CC.cfgStoreDir <$> CC.readConfig
21 |   where
22 |     handle' = handle (\(_ :: IOException) -> return $ if S.isPosix then P.cabalDirectory else W.cabalDirectory)
23 |
24 | buildPath :: FilePath
25 | buildPath = "dist-newstyle"
26 |
27 | path :: FilePath
28 | path = "."
--------------------------------------------------------------------------------
/app/App/Static/Base.hs:
--------------------------------------------------------------------------------
1 | module App.Static.Base
2 |   ( homeDirectory,
3 |     isPosix,
4 |   ) where
5 |
6 | import HaskellWorks.Prelude
7 |
8 | import qualified System.Directory as IO
9 | import qualified System.IO.Unsafe as IO
10 | import qualified System.Info as I
11 |
12 | homeDirectory :: FilePath
13 | homeDirectory = IO.unsafePerformIO IO.getHomeDirectory
14 | {-# NOINLINE homeDirectory #-}
15 |
16 | isPosix :: Bool
17 | isPosix = I.os /= "mingw32"
18 | {-# NOINLINE isPosix #-}
--------------------------------------------------------------------------------
/app/App/Static/Posix.hs:
--------------------------------------------------------------------------------
1 | module App.Static.Posix
2 |   ( cabalDirectory,
3 |   ) where
4 |
5 | import HaskellWorks.CabalCache.Location ((</>))
6 | import HaskellWorks.Prelude
7 |
8 | import qualified App.Static.Base as S
9 | import qualified System.Directory as IO
10 | import qualified System.Environment as IO
11 | import qualified System.IO as IO
12 | import qualified System.IO.Unsafe as IO
13 |
14 | cabalDirectory :: FilePath
15 | cabalDirectory =
16 |   IO.unsafePerformIO do
17 |     IO.lookupEnv "CABAL_DIR" >>= \case
18 |       Just dir -> pure dir
19 |       Nothing -> do
20 |         let oldCabalDir = S.homeDirectory </> ".cabal"
21 |         let newCabaldir = S.homeDirectory </> ".local/state/cabal/store"
22 |         oldExists <- IO.doesDirectoryExist oldCabalDir
23 |         newExists <- IO.doesDirectoryExist newCabaldir
24 |         case (oldExists, newExists) of
25 |           (True, True) -> do
26 |             IO.hPutStrLn IO.stderr "Warning: Both ~/.cabal and ~/.local/state/cabal/store exist"
27 |             pure newCabaldir
28 |           (True, _) -> pure oldCabalDir
29 |           (_, True) -> pure newCabaldir
30 |           _ -> fail "No cabal directory found"
--------------------------------------------------------------------------------
/app/App/Static/Windows.hs:
--------------------------------------------------------------------------------
1 | module App.Static.Windows
2 |   ( appDataDirectory,
3 |     cabalDirectory,
4 |   ) where
5 |
6 | import HaskellWorks.CabalCache.Location ((</>))
7 | import HaskellWorks.Prelude
8 |
9 | import qualified App.Static.Base as S
10 | import qualified System.Environment as IO
11 | import qualified System.IO.Unsafe as IO
12 |
/app/App/Static/Windows.hs: -------------------------------------------------------------------------------- 1 | module App.Static.Windows 2 | ( appDataDirectory, 3 | cabalDirectory, 4 | ) where 5 | 6 | import HaskellWorks.CabalCache.Location ((</>)) 7 | import HaskellWorks.Prelude 8 | 9 | import qualified App.Static.Base as S 10 | import qualified System.Environment as IO 11 | import qualified System.IO.Unsafe as IO 12 | 13 | appDataDirectory :: FilePath 14 | appDataDirectory = IO.unsafePerformIO $ fmap (fromMaybe S.homeDirectory) (IO.lookupEnv "APPDATA") 15 | {-# NOINLINE appDataDirectory #-} 16 | 17 | cabalDirectory :: FilePath 18 | cabalDirectory = appDataDirectory </> "cabal" 19 | -------------------------------------------------------------------------------- /app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import App.Commands (commands) 4 | import HaskellWorks.Prelude 5 | 6 | import qualified Options.Applicative as OA 7 | 8 | main :: IO () 9 | main = join $ OA.customExecParser 10 | (OA.prefs $ OA.showHelpOnEmpty <> OA.showHelpOnError) 11 | (OA.info (commands <**> OA.helper) OA.idm) 12 | -------------------------------------------------------------------------------- /cabal-cache.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 3.4 2 | 3 | name: cabal-cache 4 | version: 1.2.0.0 5 | synopsis: CI Assistant for Haskell projects 6 | description: CI Assistant for Haskell projects. Implements package caching. 7 | homepage: https://github.com/haskell-works/cabal-cache 8 | license: BSD-3-Clause 9 | license-file: LICENSE 10 | author: John Ky 11 | maintainer: newhoggy@gmail.com 12 | copyright: John Ky 2019-2023 13 | category: Development 14 | tested-with: GHC == 9.6.6, GHC == 9.4.8, GHC == 9.2.8 15 | extra-doc-files: CHANGELOG.md 16 | README.md 17 | 18 | source-repository head 19 | type: git 20 | location: https://github.com/haskell-works/cabal-cache 21 | 22 | common base { build-depends: base >= 4.7 && < 5 } 23 | 24 | common aeson { build-depends: aeson >= 2 && < 2.3 } 25 | common amazonka { build-depends: amazonka >= 2 && < 3 } 26 | common amazonka-core { build-depends: amazonka-core >= 2 && < 3 } 27 | common amazonka-s3 { build-depends: amazonka-s3 >= 2 && < 3 } 28 | common attoparsec { build-depends: attoparsec >= 0.14 && < 0.15 } 29 | common bytestring { build-depends: bytestring >= 0.10.8.2 && < 0.13 } 30 | common cabal-install-parsers { build-depends: cabal-install-parsers >= 0.6.1 && < 0.7 } 31 | common conduit-extra { build-depends: conduit-extra >= 1.3.1.1 && < 1.4 } 32 | common containers { build-depends: containers >= 0.6.0.1 && < 0.7 } 33 | common cryptonite { build-depends: cryptonite >= 0.25 && < 1 } 34 | common deepseq { build-depends: deepseq >= 1.4.4.0 && < 1.6 } 35 | common directory { build-depends: directory >= 1.3.3.0 && < 1.4 } 36 | common effectful { build-depends: effectful >= 2.5 && < 3 } 37 | common effectful-core { build-depends: effectful-core >= 2.5 && < 3 } 38 | common effectful-plugin { build-depends: effectful-plugin >= 1.1.0.4 && < 2 } 39 | common effectful-zoo-amazonka { build-depends: effectful-zoo:amazonka >= 0.0.1.0 && < 0.1 } 40 | common effectful-zoo-console { build-depends: effectful-zoo:console >= 0.0.1.0 && < 0.1 } 41 | common effectful-zoo-core { build-depends: effectful-zoo:core >= 0.0.1.0 && < 0.1 } 42 | common effectful-zoo-datalog { build-depends: effectful-zoo:datalog >= 0.0.1.0 && < 0.1 } 43 | common effectful-zoo-hedgehog { build-depends: effectful-zoo:hedgehog >= 0.0.1.0 && < 0.1 } 44 | common effectful-zoo-hunit { build-depends: effectful-zoo:hunit >= 0.0.1.0 && < 0.1 } 45 | common effectful-zoo-lazy { build-depends: effectful-zoo:lazy >= 0.0.1.0 && < 0.1 } 46 | common effectful-zoo-log { build-depends: effectful-zoo:log >= 0.0.1.0 && < 0.1 } 47 | common exceptions { build-depends: exceptions >= 0.10.1 && < 0.11 } 48 | common filepath {
build-depends: filepath >= 1.3 && < 1.6 } 49 | common generic-lens { build-depends: generic-lens >= 1.1.0.0 && < 2.3 } 50 | common Glob { build-depends: Glob >= 0.10.2 && < 0.11 } 51 | common hedgehog { build-depends: hedgehog >= 1.5 && < 1.6 } 52 | common hedgehog-extras { build-depends: hedgehog-extras >= 0.4 && < 0.7 } 53 | common hspec { build-depends: hspec >= 2.4 && < 3 } 54 | common http-client { build-depends: http-client >= 0.5.14 && < 0.8 } 55 | common http-client-tls { build-depends: http-client-tls >= 0.3 && < 0.4 } 56 | common http-types { build-depends: http-types >= 0.12.3 && < 0.13 } 57 | common hw-hedgehog { build-depends: hw-hedgehog >= 0.1.0.3 && < 0.2 } 58 | common hw-hspec-hedgehog { build-depends: hw-hspec-hedgehog >= 0.1.0.4 && < 0.2 } 59 | common hw-prelude { build-depends: hw-prelude >= 0.0.4.0 && < 0.1 } 60 | common lens { build-depends: lens >= 4.17 && < 6 } 61 | common mtl { build-depends: mtl >= 2.2.2 && < 2.4 } 62 | common network-uri { build-depends: network-uri >= 2.6.4.1 && < 2.8 } 63 | common optparse-applicative { build-depends: optparse-applicative >= 0.14 && < 0.19 } 64 | common microlens { build-depends: microlens >= 0.4.13.1 && < 0.5 } 65 | common process { build-depends: process >= 1.6.5.0 && < 1.7 } 66 | common raw-strings-qq { build-depends: raw-strings-qq >= 1.1 && < 2 } 67 | common relation { build-depends: relation >= 0.5 && < 0.6 } 68 | common resourcet { build-depends: resourcet >= 1.2.2 && < 1.4 } 69 | common resourcet-effectful { build-depends: resourcet-effectful >= 1.0.1.0 && < 1.1 } 70 | common selective { build-depends: selective >= 0.1.0 && < 0.8 } 71 | common stm { build-depends: stm >= 2.5.0.0 && < 3 } 72 | common stringsearch { build-depends: stringsearch >= 0.3.6.6 && < 0.4 } 73 | common tar { build-depends: tar >= 0.5.1.0 && < 0.7 } 74 | common temporary { build-depends: temporary >= 1.3 && < 1.4 } 75 | common text { build-depends: text >= 1.2.3.1 && < 2.2 } 76 | common time { build-depends: time >= 1.4 && < 1.13 } 77 | common topograph { build-depends: topograph >= 1 && < 2 } 78 | common transformers { build-depends: transformers >= 0.5.6.2 && < 0.7 } 79 | common unliftio { build-depends: unliftio >= 0.2.10 && < 0.3 } 80 | common zlib { build-depends: zlib >= 0.6.2 && < 0.8 } 81 | 82 | flag dev 83 | description: Enable development settings like -Werror 84 | default: False 85 | manual: True 86 | 87 | common project-config 88 | default-language: Haskell2010 89 | default-extensions: AllowAmbiguousTypes 90 | ApplicativeDo 91 | BlockArguments 92 | DataKinds 93 | DefaultSignatures 94 | DeriveGeneric 95 | DeriveAnyClass 96 | DerivingStrategies 97 | DerivingVia 98 | DisambiguateRecordFields 99 | DuplicateRecordFields 100 | FlexibleContexts 101 | FunctionalDependencies 102 | GADTs 103 | ImportQualifiedPost 104 | LambdaCase 105 | LiberalTypeSynonyms 106 | MonadComprehensions 107 | MonoLocalBinds 108 | MultiWayIf 109 | NoFieldSelectors 110 | NoImplicitPrelude 111 | OverloadedLabels 112 | OverloadedRecordDot 113 | OverloadedStrings 114 | PackageImports 115 | PartialTypeSignatures 116 | PatternSynonyms 117 | QuantifiedConstraints 118 | QuasiQuotes 119 | RecordWildCards 120 | RecursiveDo 121 | RoleAnnotations 122 | ScopedTypeVariables 123 | TupleSections 124 | TypeFamilies 125 | TypeFamilyDependencies 126 | TypeApplications 127 | TypeOperators 128 | UndecidableInstances 129 | UnicodeSyntax 130 | ViewPatterns 131 | ghc-options: -Wall 132 | -Wincomplete-record-updates 133 | -Wincomplete-uni-patterns 134 | -- -Wno-incomplete-patterns 135 | -Wtabs 
136 | -Wunused-packages 137 | if flag(dev) 138 | ghc-options: -Werror 139 | 140 | library 141 | import: base, project-config, 142 | aeson, 143 | amazonka, 144 | amazonka-core, 145 | amazonka-s3, 146 | attoparsec, 147 | bytestring, 148 | conduit-extra, 149 | containers, 150 | cryptonite, 151 | deepseq, 152 | directory, 153 | effectful, 154 | effectful-core, 155 | effectful-plugin, 156 | effectful-zoo-amazonka, 157 | effectful-zoo-console, 158 | effectful-zoo-core, 159 | effectful-zoo-datalog, 160 | effectful-zoo-lazy, 161 | effectful-zoo-log, 162 | exceptions, 163 | filepath, 164 | generic-lens, 165 | http-client, 166 | http-client-tls, 167 | http-types, 168 | hw-prelude, 169 | microlens, 170 | network-uri, 171 | optparse-applicative, 172 | process, 173 | relation, 174 | resourcet, 175 | resourcet-effectful, 176 | stm, 177 | text, 178 | topograph, 179 | other-modules: Paths_cabal_cache 180 | autogen-modules: Paths_cabal_cache 181 | hs-source-dirs: src 182 | exposed-modules: HaskellWorks.CabalCache.AppError 183 | HaskellWorks.CabalCache.AWS.Env 184 | HaskellWorks.CabalCache.AWS.Error 185 | HaskellWorks.CabalCache.AWS.S3 186 | HaskellWorks.CabalCache.AWS.S3.URI 187 | HaskellWorks.CabalCache.Concurrent.DownloadQueue 188 | HaskellWorks.CabalCache.Concurrent.Fork 189 | HaskellWorks.CabalCache.Concurrent.Type 190 | HaskellWorks.CabalCache.Core 191 | HaskellWorks.CabalCache.Data.List 192 | HaskellWorks.CabalCache.Error 193 | HaskellWorks.CabalCache.Exit 194 | HaskellWorks.CabalCache.GhcPkg 195 | HaskellWorks.CabalCache.Hash 196 | HaskellWorks.CabalCache.IO.Console 197 | HaskellWorks.CabalCache.IO.File 198 | HaskellWorks.CabalCache.IO.Lazy 199 | HaskellWorks.CabalCache.IO.Tar 200 | HaskellWorks.CabalCache.Location 201 | HaskellWorks.CabalCache.Metadata 202 | HaskellWorks.CabalCache.Options 203 | HaskellWorks.CabalCache.Store 204 | HaskellWorks.CabalCache.Text 205 | HaskellWorks.CabalCache.Topology 206 | HaskellWorks.CabalCache.Types 207 | HaskellWorks.CabalCache.URI 208 | HaskellWorks.CabalCache.Version 209 | ghc-options: -fplugin=Effectful.Plugin 210 | 211 | executable cabal-cache 212 | import: base, project-config, 213 | aeson, 214 | amazonka, 215 | amazonka-core, 216 | bytestring, 217 | cabal-install-parsers, 218 | containers, 219 | directory, 220 | effectful-core, 221 | effectful-plugin, 222 | effectful-zoo-amazonka, 223 | effectful-zoo-core, 224 | effectful-zoo-datalog, 225 | effectful-zoo-lazy, 226 | effectful-zoo-log, 227 | effectful, 228 | exceptions, 229 | filepath, 230 | generic-lens, 231 | hw-prelude, 232 | lens, 233 | network-uri, 234 | optparse-applicative, 235 | resourcet-effectful, 236 | stm, 237 | stringsearch, 238 | temporary, 239 | text, 240 | build-depends: cabal-cache 241 | main-is: Main.hs 242 | hs-source-dirs: app 243 | other-modules: App.Amazonka 244 | App.Commands 245 | App.Commands.Options.Parser 246 | App.Commands.Debug 247 | App.Commands.Debug.S3 248 | App.Commands.Debug.S3.Cp 249 | App.Commands.Options.Types 250 | App.Commands.Plan 251 | App.Commands.SyncFromArchive 252 | App.Commands.SyncToArchive 253 | App.Commands.Version 254 | App.Run 255 | App.Static 256 | App.Static.Base 257 | App.Static.Posix 258 | App.Static.Windows 259 | Paths_cabal_cache 260 | autogen-modules: Paths_cabal_cache 261 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 262 | ghc-options: -fplugin=Effectful.Plugin 263 | 264 | test-suite cabal-cache-test 265 | import: base, project-config, 266 | aeson, 267 | amazonka, 268 | bytestring, 269 | directory, 270 | effectful-core, 271 | effectful-plugin, 
272 | effectful-zoo-amazonka, 273 | effectful-zoo-core, 274 | effectful-zoo-hedgehog, 275 | effectful-zoo-hunit, 276 | effectful-zoo-lazy, 277 | effectful, 278 | exceptions, 279 | filepath, 280 | generic-lens, 281 | Glob, 282 | hedgehog-extras, 283 | hedgehog, 284 | hspec, 285 | hw-hspec-hedgehog, 286 | hw-prelude, 287 | microlens, 288 | network-uri, 289 | raw-strings-qq, 290 | resourcet-effectful, 291 | text, 292 | time 293 | type: exitcode-stdio-1.0 294 | main-is: Spec.hs 295 | build-depends: cabal-cache 296 | hs-source-dirs: test 297 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 298 | build-tool-depends: hspec-discover:hspec-discover 299 | other-modules: HaskellWorks.CabalCache.AwsSpec 300 | HaskellWorks.CabalCache.IntegrationSpec 301 | HaskellWorks.CabalCache.LocationSpec 302 | HaskellWorks.CabalCache.QuerySpec 303 | Test.Base 304 | ghc-options: -fplugin=Effectful.Plugin 305 | -------------------------------------------------------------------------------- /cabal.project: -------------------------------------------------------------------------------- 1 | packages: . 2 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/AWS/Env.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.AWS.Env 2 | ( awsLogger 3 | , mkEnv 4 | ) where 5 | 6 | import Control.Concurrent (myThreadId) 7 | import Data.ByteString.Builder (toLazyByteString) 8 | import Effectful.Zoo.Amazonka.Data.AwsEnv 9 | import HaskellWorks.Prelude 10 | import Network.HTTP.Client (HttpException (..), HttpExceptionContent (..)) 11 | 12 | import qualified Amazonka as AWS 13 | import qualified Data.ByteString as BS 14 | import qualified Data.ByteString.Lazy as L 15 | import qualified Data.ByteString.Lazy as LBS 16 | import qualified Data.ByteString.Lazy.Char8 as LC8 17 | import qualified Data.Text.Encoding as T 18 | import qualified HaskellWorks.CabalCache.IO.Console as CIO 19 | import qualified System.IO as IO 20 | 21 | mkEnv :: AWS.Region -> (AWS.LogLevel -> LBS.ByteString -> IO ()) -> IO AwsEnv 22 | mkEnv region lg = do 23 | lgr <- newAwsLogger lg 24 | discoveredEnv <- AWS.newEnv AWS.discover 25 | 26 | pure discoveredEnv 27 | { AWS.logger = lgr 28 | , AWS.region = region 29 | , AWS.retryCheck = retryPolicy 5 30 | } 31 | 32 | newAwsLogger :: Monad m => (AWS.LogLevel -> LBS.ByteString -> IO ()) -> m AWS.Logger 33 | newAwsLogger lg = return $ \y b -> 34 | let lazyMsg = toLazyByteString b 35 | in case L.toStrict lazyMsg of 36 | msg | BS.isInfixOf "404 Not Found" msg -> lg AWS.Debug lazyMsg 37 | msg | BS.isInfixOf "304 Not Modified" msg -> lg AWS.Debug lazyMsg 38 | _ -> lg y lazyMsg 39 | 40 | retryPolicy :: Int -> Int -> AWS.HttpException -> Bool 41 | retryPolicy maxNum attempt ex = (attempt <= maxNum) && shouldRetry ex 42 | 43 | shouldRetry :: AWS.HttpException -> Bool 44 | shouldRetry ex = case ex of 45 | HttpExceptionRequest _ ctx -> case ctx of 46 | ResponseTimeout -> True 47 | ConnectionTimeout -> True 48 | ConnectionFailure _ -> True 49 | InvalidChunkHeaders -> True 50 | ConnectionClosed -> True 51 | InternalException _ -> True 52 | NoResponseDataReceived -> True 53 | ResponseBodyTooShort _ _ -> True 54 | _ -> False 55 | _ -> False 56 | 57 | awsLogger :: Maybe AWS.LogLevel -> AWS.LogLevel -> LC8.ByteString -> IO () 58 | awsLogger maybeConfigLogLevel msgLogLevel message = 59 | forM_ maybeConfigLogLevel $ \configLogLevel -> 60 | when (msgLogLevel <= configLogLevel) do 61 | threadId <- myThreadId 62 | 
CIO.hPutStrLn IO.stderr $ "[" <> tshow msgLogLevel <> "] [tid: " <> tshow threadId <> "] " <> text 63 | where text = T.decodeUtf8 $ LBS.toStrict message 64 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/AWS/Error.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.AWS.Error 2 | ( handleAwsStatusError 3 | ) where 4 | 5 | import Effectful 6 | import Effectful.Zoo.Amazonka.Data.AwsError 7 | import Effectful.Zoo.Core 8 | import Effectful.Zoo.Core.Error.Static 9 | import HaskellWorks.CabalCache.AppError (AwsStatusError(..)) 10 | import HaskellWorks.Prelude 11 | 12 | import qualified Amazonka as AWS 13 | import qualified Network.HTTP.Types as HTTP 14 | 15 | {- HLINT ignore "Redundant do" -} 16 | {- HLINT ignore "Reduce duplication" -} 17 | {- HLINT ignore "Redundant bracket" -} 18 | 19 | handleAwsStatusError :: () 20 | => r <: Error AwsError 21 | => r <: Error AwsStatusError 22 | => Eff r a 23 | -> Eff r a 24 | handleAwsStatusError f = f & trapIn @AwsError \case 25 | (AWS.ServiceError (AWS.ServiceError' _ s@(HTTP.Status _ _) _ _ _ _)) -> throw $ AwsStatusError s 26 | e -> throw e 27 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/AWS/S3.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.AWS.S3 2 | ( uriToS3Uri, 3 | headS3Uri, 4 | getS3Uri, 5 | copyS3Uri, 6 | putObject, 7 | 8 | ) where 9 | 10 | import Amazonka (ResponseBody) 11 | import Amazonka.Data (ToText(..), fromText) 12 | import Control.Monad.Trans.Resource (MonadResource, liftResourceT) 13 | import Data.Conduit.Lazy (lazyConsume) 14 | import Data.Generics.Product.Any (the) 15 | import Effectful 16 | import Effectful.Resource 17 | import Effectful.Zoo.Amazonka.Api.Send 18 | import Effectful.Zoo.Amazonka.Data 19 | import Effectful.Zoo.Core 20 | import Effectful.Zoo.Core.Error.Static 21 | import Effectful.Zoo.Lazy.Dynamic 22 | import Effectful.Zoo.DataLog.Dynamic 23 | import HaskellWorks.CabalCache.AppError (AwsStatusError(..)) 24 | import HaskellWorks.CabalCache.Error (CopyFailed(..), UnsupportedUri(..)) 25 | import HaskellWorks.Prelude 26 | import Lens.Micro 27 | import Network.URI (URI) 28 | 29 | import qualified Amazonka as AWS 30 | import qualified Amazonka.S3 as AWS 31 | import qualified Data.ByteString.Lazy as LBS 32 | import qualified HaskellWorks.CabalCache.AWS.Error as AWS 33 | import qualified HaskellWorks.CabalCache.AWS.S3.URI as AWS 34 | import qualified HaskellWorks.CabalCache.IO.Console as CIO 35 | import qualified HaskellWorks.CabalCache.URI as URI 36 | import qualified System.IO as IO 37 | 38 | -- | Access the response body as a lazy bytestring 39 | lazyByteString :: MonadResource m => ResponseBody -> m LBS.ByteString 40 | lazyByteString rsBody = liftResourceT $ LBS.fromChunks <$> lazyConsume rsBody.body 41 |
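-- Note that lazyConsume pulls response chunks on demand, so the resulting
-- lazy ByteString must be consumed while the enclosing Resource region is
-- still open. A minimal sketch (the function name and output path are
-- hypothetical):
--
--   saveBody :: ResponseBody -> IO ()
--   saveBody rsBody = runResourceT do
--     lbs <- lazyByteString rsBody
--     liftIO $ LBS.writeFile "body.out" lbs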
the @"body" 53 | 54 | unsafeDownload :: () 55 | => r <: DataLog AwsLogEntry 56 | => r <: Error AwsError 57 | => r <: IOE 58 | => r <: Lazy AwsEnv 59 | => r <: Resource 60 | => AWS.BucketName 61 | -> AWS.ObjectKey 62 | -> Eff r LBS.ByteString 63 | unsafeDownload bucketName objectKey = 64 | unsafeDownloadRequest (AWS.newGetObject bucketName objectKey) 65 | 66 | uriToS3Uri :: URI -> Either UnsupportedUri AWS.S3Uri 67 | uriToS3Uri uri = case fromText @AWS.S3Uri (tshow uri) of 68 | Right s3Uri -> Right s3Uri 69 | Left msg -> Left $ UnsupportedUri uri $ "Unable to parse URI" <> tshow msg 70 | 71 | headS3Uri :: () 72 | => r <: DataLog AwsLogEntry 73 | => r <: Error AwsError 74 | => r <: Error AwsStatusError 75 | => r <: Error UnsupportedUri 76 | => r <: IOE 77 | => r <: Lazy AwsEnv 78 | => r <: Resource 79 | => URI 80 | -> Eff r AWS.HeadObjectResponse 81 | headS3Uri uri = do 82 | AWS.S3Uri b k <- uriToS3Uri (URI.reslashUri uri) 83 | & onLeft throw 84 | 85 | AWS.handleAwsStatusError $ lazySendAws $ AWS.newHeadObject b k 86 | 87 | putObject :: () 88 | => r <: DataLog AwsLogEntry 89 | => r <: Error AwsError 90 | => r <: Error AwsStatusError 91 | => r <: Error UnsupportedUri 92 | => r <: IOE 93 | => r <: Lazy AwsEnv 94 | => r <: Resource 95 | => AWS.ToBody a 96 | => URI 97 | -> a 98 | -> Eff r () 99 | putObject uri lbs = do 100 | AWS.S3Uri b k <- uriToS3Uri (URI.reslashUri uri) 101 | & onLeft throw 102 | 103 | let req = AWS.toBody lbs 104 | let po = AWS.newPutObject b k req 105 | 106 | AWS.handleAwsStatusError $ void $ lazySendAws po 107 | 108 | getS3Uri :: () 109 | => r <: DataLog AwsLogEntry 110 | => r <: Error AwsError 111 | => r <: Error AwsStatusError 112 | => r <: Error UnsupportedUri 113 | => r <: IOE 114 | => r <: Lazy AwsEnv 115 | => r <: Resource 116 | => URI 117 | -> Eff r LBS.ByteString 118 | getS3Uri uri = do 119 | AWS.S3Uri b k <- uriToS3Uri (URI.reslashUri uri) 120 | & onLeft throw 121 | 122 | AWS.handleAwsStatusError $ unsafeDownload b k 123 | 124 | copyS3Uri :: () 125 | => r <: DataLog AwsLogEntry 126 | => r <: Error AwsError 127 | => r <: Error AwsStatusError 128 | => r <: Error CopyFailed 129 | => r <: Error UnsupportedUri 130 | => r <: IOE 131 | => r <: Lazy AwsEnv 132 | => r <: Resource 133 | => URI 134 | -> URI 135 | -> Eff r () 136 | copyS3Uri source target = do 137 | AWS.S3Uri sourceBucket sourceObjectKey <- uriToS3Uri (URI.reslashUri source) & onLeft throw 138 | AWS.S3Uri targetBucket targetObjectKey <- uriToS3Uri (URI.reslashUri target) & onLeft throw 139 | let copyObjectRequest = AWS.newCopyObject targetBucket (toText sourceBucket <> "/" <> toText sourceObjectKey) targetObjectKey 140 | response <- lazySendAws copyObjectRequest 141 | let responseCode = response ^. the @"httpStatus" 142 | unless (200 <= responseCode && responseCode < 300) do 143 | liftIO $ CIO.hPutStrLn IO.stderr $ "Error in S3 copy: " <> tshow response 144 | throw CopyFailed 145 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/AWS/S3/URI.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.AWS.S3.URI 2 | ( S3Uri(..) 
/src/HaskellWorks/CabalCache/AWS/S3/URI.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.AWS.S3.URI 2 | ( S3Uri(..) 3 | ) where 4 | 5 | import Control.DeepSeq (NFData) 6 | import HaskellWorks.Prelude 7 | 8 | import qualified Amazonka.Data.Text as AWS 9 | import qualified Amazonka.S3 as AWS 10 | import qualified Data.Aeson as J 11 | import qualified Data.Aeson.Types as J 12 | import qualified Data.Attoparsec.Combinator as DAC 13 | import qualified Data.Attoparsec.Text as DAT 14 | import qualified Data.Text as T 15 | 16 | data S3Uri = S3Uri 17 | { bucket :: AWS.BucketName 18 | , objectKey :: AWS.ObjectKey 19 | } deriving (Show, Eq, Ord, Generic, NFData) 20 | 21 | instance AWS.FromText S3Uri where 22 | fromText = DAT.parseOnly $ do 23 | _ <- DAT.string "s3://" 24 | bn <- AWS.BucketName . T.pack <$> DAC.many1 (DAT.satisfy (\c -> c /= '/' && c /= ' ')) 25 | _ <- optional (DAT.char '/') 26 | ok <- AWS.ObjectKey . T.pack <$> many DAT.anyChar 27 | DAT.endOfInput 28 | return (S3Uri bn ok) 29 | 30 | instance AWS.ToText S3Uri where 31 | toText loc = toS3Uri loc.bucket loc.objectKey 32 | 33 | instance J.ToJSON S3Uri where 34 | toJSON s3Uri = J.String (AWS.toText s3Uri) 35 | 36 | instance J.FromJSON S3Uri where 37 | parseJSON v = case v of 38 | J.String s -> case AWS.fromText s of 39 | Right s3Uri -> return s3Uri 40 | Left msg -> J.typeMismatch ("S3Uri (" <> msg <> ")") v 41 | _ -> J.typeMismatch "S3Uri" v 42 | 43 | toS3Uri :: AWS.BucketName -> AWS.ObjectKey -> Text 44 | toS3Uri (AWS.BucketName b) (AWS.ObjectKey k) = "s3://" <> b <> "/" <> k 45 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/AppError.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.AppError 2 | ( AwsStatusError(..), 3 | HttpError(..), 4 | HasStatusCode(..), 5 | HasMaybeStatusCode(..), 6 | displayAwsStatusError, 7 | displayHttpError, 8 | ) where 9 | 10 | import HaskellWorks.Prelude 11 | 12 | import qualified Network.HTTP.Client as HTTP 13 | import qualified Network.HTTP.Types as HTTP 14 | 15 | newtype AwsStatusError = AwsStatusError 16 | { status :: HTTP.Status 17 | } 18 | deriving (Eq, Show, Generic) 19 | 20 | data HttpError = HttpError 21 | { request :: HTTP.Request 22 | , content :: HTTP.HttpExceptionContent 23 | } 24 | deriving (Show, Generic) 25 | 26 | displayAwsStatusError :: AwsStatusError -> Text 27 | displayAwsStatusError (AwsStatusError s) = tshow s 28 | 29 | displayHttpError :: HttpError -> Text 30 | displayHttpError (HttpError _ s) = tshow s 31 | 32 | class HasStatusCode a where 33 | statusCodeOf :: a -> Int 34 | 35 | class HasMaybeStatusCode a where 36 | maybeStatusCodeOf :: a -> Maybe Int 37 | 38 | instance HasStatusCode AwsStatusError where 39 | statusCodeOf (AwsStatusError (HTTP.Status c _)) = c 40 | 41 | instance HasMaybeStatusCode AwsStatusError where 42 | maybeStatusCodeOf (AwsStatusError (HTTP.Status c _)) = Just c 43 | 44 | instance HasMaybeStatusCode HttpError where 45 | maybeStatusCodeOf (HttpError _ content') = case content' of 46 | HTTP.StatusCodeException response _ -> let HTTP.Status c _ = HTTP.responseStatus response in Just c 47 | _ -> Nothing 48 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Concurrent/DownloadQueue.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Concurrent.DownloadQueue 2 | ( DownloadStatus(..), 3 | createDownloadQueue, 4 | runQueue, 5 | downloadSucceed, 6 | downloadFail, 7 | ) where 8 | 9 | import Control.Monad.Catch (MonadMask(..))
10 | import Data.Set ((\\)) 11 | import Effectful 12 | import Effectful.Zoo.Core 13 | import Effectful.Zoo.Core.Error.Static 14 | import Prelude hiding (fail) 15 | import HaskellWorks.Prelude 16 | 17 | import qualified Control.Concurrent.STM as STM 18 | import qualified Control.Monad.Catch as CMC 19 | import qualified Data.Relation as R 20 | import qualified Data.Set as S 21 | import qualified HaskellWorks.CabalCache.Concurrent.Type as Z 22 | import qualified HaskellWorks.CabalCache.IO.Console as CIO 23 | import qualified System.IO as IO 24 | 25 | data DownloadStatus = DownloadSuccess | DownloadFailure deriving (Eq, Show) 26 | 27 | downloadSucceed :: forall a r. () 28 | => r <: Error DownloadStatus 29 | => Eff r a 30 | downloadSucceed = 31 | throw DownloadSuccess 32 | 33 | downloadFail :: forall a r. () 34 | => r <: Error DownloadStatus 35 | => Eff r a 36 | downloadFail = 37 | throw DownloadFailure 38 | 39 | createDownloadQueue :: [(Z.ProviderId, Z.ConsumerId)] -> STM.STM Z.DownloadQueue 40 | createDownloadQueue dependencies = do 41 | tDependencies <- STM.newTVar (R.fromList dependencies) 42 | tUploading <- STM.newTVar S.empty 43 | tFailures <- STM.newTVar S.empty 44 | return Z.DownloadQueue {..} 45 | 46 | takeReady :: Z.DownloadQueue -> STM.STM (Maybe Z.PackageId) 47 | takeReady Z.DownloadQueue {..} = do 48 | dependencies <- STM.readTVar tDependencies 49 | uploading <- STM.readTVar tUploading 50 | failures <- STM.readTVar tFailures 51 | 52 | let ready = R.ran dependencies \\ R.dom dependencies \\ uploading \\ failures 53 | 54 | case S.lookupMin ready of 55 | Just packageId -> do 56 | STM.writeTVar tUploading (S.insert packageId uploading) 57 | return (Just packageId) 58 | Nothing -> if S.null (R.ran dependencies \\ R.dom dependencies \\ failures) 59 | then return Nothing 60 | else STM.retry 61 | 62 | commit :: Z.DownloadQueue -> Z.PackageId -> STM.STM () 63 | commit Z.DownloadQueue {..} packageId = do 64 | dependencies <- STM.readTVar tDependencies 65 | uploading <- STM.readTVar tUploading 66 | 67 | STM.writeTVar tUploading $ S.delete packageId uploading 68 | STM.writeTVar tDependencies $ R.withoutRan (S.singleton packageId) dependencies 69 | 70 | failDownload :: Z.DownloadQueue -> Z.PackageId -> STM.STM () 71 | failDownload Z.DownloadQueue {..} packageId = do 72 | uploading <- STM.readTVar tUploading 73 | failures <- STM.readTVar tFailures 74 | 75 | STM.writeTVar tUploading $ S.delete packageId uploading 76 | STM.writeTVar tFailures $ S.insert packageId failures 77 | 78 | runQueue :: (MonadIO m, MonadMask m) => Z.DownloadQueue -> (Z.PackageId -> m DownloadStatus) -> m () 79 | runQueue downloadQueue f = do 80 | maybePackageId <- liftIO $ STM.atomically $ takeReady downloadQueue 81 | 82 | case maybePackageId of 83 | Just packageId -> do 84 | downloadStatus <- f packageId 85 | & do CMC.handleAll \e -> do 86 | liftIO $ CIO.hPutStrLn IO.stderr $ "Warning: Unexpected exception during download of " <> packageId <> ": " <> tshow e 87 | liftIO $ IO.hFlush IO.stderr 88 | pure DownloadFailure 89 | case downloadStatus of 90 | DownloadSuccess -> do 91 | liftIO $ CIO.hPutStrLn IO.stderr $ "Downloaded " <> packageId 92 | liftIO $ STM.atomically $ commit downloadQueue packageId 93 | DownloadFailure -> do 94 | liftIO $ CIO.hPutStrLn IO.stderr $ "Failed to download " <> packageId 95 | liftIO $ STM.atomically $ failDownload downloadQueue packageId 96 | runQueue downloadQueue f 97 | 98 | Nothing -> return () 99 | --------------------------------------------------------------------------------
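A minimal usage sketch of the queue above (package ids hypothetical, reusing the module's qualified imports): dependency edges go in as (provider, consumer) pairs, and the worker callback reports a status per package:

exampleQueue :: IO ()
exampleQueue = do
  queue <- STM.atomically $ createDownloadQueue [("base-4.18.0.0", "my-lib-0.1.0.0")]
  runQueue queue $ \packageId -> do
    CIO.hPutStrLn IO.stderr ("downloading " <> packageId)
    pure DownloadSuccess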
/src/HaskellWorks/CabalCache/Concurrent/Fork.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Concurrent.Fork 2 | ( forkThreadsWait, 3 | ) where 4 | 5 | import Effectful 6 | import Effectful.Concurrent 7 | import Effectful.Concurrent.STM 8 | import Effectful.Exception 9 | import Effectful.Zoo.Core 10 | import HaskellWorks.Prelude 11 | 12 | forkThreadsWait :: () 13 | => r <: Concurrent 14 | => Int 15 | -> Eff r () 16 | -> Eff r () 17 | forkThreadsWait n f = do 18 | tDone <- newTVarIO (0 :: Int) 19 | 20 | forM_ [1 .. n] $ \_ -> forkIO do 21 | f `finally` atomically (modifyTVar tDone (+1)) 22 | 23 | atomically do 24 | done <- readTVar tDone 25 | when (done < n) retry 26 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Concurrent/Type.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Concurrent.Type 2 | ( DownloadQueue(..), 3 | ConsumerId, 4 | ProviderId, 5 | PackageId, 6 | ) where 7 | 8 | import GHC.Generics (Generic) 9 | import HaskellWorks.CabalCache.Types (PackageId) 10 | 11 | import qualified Control.Concurrent.STM as STM 12 | import qualified Data.Relation as R 13 | import qualified Data.Set as S 14 | 15 | type ConsumerId = PackageId 16 | type ProviderId = PackageId 17 | 18 | data DownloadQueue = DownloadQueue 19 | { tDependencies :: STM.TVar (R.Relation ConsumerId ProviderId) 20 | , tUploading :: STM.TVar (S.Set PackageId) 21 | , tFailures :: STM.TVar (S.Set PackageId) 22 | } deriving Generic 23 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Core.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Core 2 | ( PackageInfo(..), 3 | Tagged(..), 4 | Presence(..), 5 | getPackages, 6 | relativePaths, 7 | loadPlan, 8 | mkCompilerContext, 9 | ) where 10 | 11 | import Control.DeepSeq (NFData) 12 | import Data.Aeson (eitherDecode) 13 | import Effectful 14 | import Effectful.Zoo.Core 15 | import Effectful.Zoo.Core.Error.Static 16 | import Effectful.Zoo.Core.Exception 17 | import HaskellWorks.CabalCache.Error (DecodeError(..)) 18 | import HaskellWorks.Prelude 19 | import System.FilePath ((<.>), (</>)) 20 | 21 | import qualified Data.ByteString.Lazy as LBS 22 | import qualified Data.List as L 23 | import qualified Data.Text as T 24 | import qualified HaskellWorks.CabalCache.IO.Tar as IO 25 | import qualified HaskellWorks.CabalCache.Types as Z 26 | import qualified System.Directory as IO 27 | import qualified System.Info as I 28 | import qualified System.Process as IO 29 | 30 | {- HLINT ignore "Monoid law, left identity" -} 31 | 32 | type PackageDir = FilePath 33 | type ConfPath = FilePath 34 | type Library = FilePath 35 | 36 | data Presence = Present | Absent deriving (Eq, Show, NFData, Generic) 37 | 38 | data Tagged a t = Tagged 39 | { value :: a 40 | , tag :: t 41 | } deriving (Eq, Show, Generic, NFData) 42 | 43 | data PackageInfo = PackageInfo 44 | { compilerId :: Z.CompilerId 45 | , packageId :: Z.PackageId 46 | , packageName :: Z.PackageName 47 | , packageDir :: PackageDir 48 | , confPath :: Tagged ConfPath Presence 49 | , libs :: [Library] 50 | } deriving (Show, Eq, Generic, NFData) 51 | 52 | isPosix :: Bool 53 | isPosix = I.os /= "mingw32" 54 | {-# NOINLINE isPosix #-} 55 | 56 | exeExt :: String 57 | exeExt 58 | | isPosix = "" 59 | | otherwise = ".exe" 60 |
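-- Doctest-style examples for the helpers below (illustrative only; the
-- result depends on the host OS):
--
-- >>> withExeExt "ghc-pkg"   -- on POSIX, where exeExt is ""
-- "ghc-pkg"
-- >>> withExeExt "ghc-pkg"   -- on Windows, where exeExt is ".exe"
-- "ghc-pkg.exe"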
61 | withExeExt :: FilePath -> FilePath 62 | withExeExt = (<.> exeExt) 63 | 64 | withExeExt' :: Text -> Text 65 | withExeExt' = T.pack . withExeExt . T.unpack 66 | 67 | findExecutable :: () 68 | => r <: Error Text 69 | => r <: IOE 70 | => Text 71 | -> Eff r Text 72 | findExecutable exe = 73 | liftIO (fmap T.pack <$> IO.findExecutable (T.unpack exe)) 74 | & onNothingM (throw (exe <> " is not in path")) 75 | 76 | runGhcPkg :: () 77 | => r <: Error Text 78 | => r <: IOE 79 | => Text 80 | -> [Text] 81 | -> Eff r Text 82 | runGhcPkg cmdExe args = catchIO (liftIO $ T.pack <$> IO.readProcess (T.unpack cmdExe) (fmap T.unpack args) "") $ 83 | \(e :: IOError) -> throw $ "Unable to run " <> cmdExe <> " " <> T.unwords args <> ": " <> tshow e 84 | 85 | verifyGhcPkgVersion :: () 86 | => r <: Error Text 87 | => r <: IOE 88 | => Text 89 | -> Text 90 | -> Eff r Text 91 | verifyGhcPkgVersion version cmdExe = do 92 | stdout <- runGhcPkg cmdExe ["--version"] 93 | if T.isSuffixOf (" " <> version) (mconcat (L.take 1 (T.lines stdout))) 94 | then return cmdExe 95 | else throw $ cmdExe <> " is not of version " <> version 96 | 97 | mkCompilerContext :: () 98 | => r <: Error Text 99 | => r <: IOE 100 | => Z.PlanJson 101 | -> Eff r Z.CompilerContext 102 | mkCompilerContext plan = do 103 | compilerVersion <- T.stripPrefix "ghc-" plan.compilerId 104 | & onNothing (throw @Text "No compiler version available in plan") 105 | 106 | let versionedGhcPkgCmd = "ghc-pkg-" <> compilerVersion 107 | 108 | ghcPkgCmdPath <- (findExecutable (withExeExt' versionedGhcPkgCmd) >>= verifyGhcPkgVersion compilerVersion) 109 | & trap_ @Text (findExecutable (withExeExt' "ghc-pkg" ) >>= verifyGhcPkgVersion compilerVersion) 110 | 111 | return (Z.CompilerContext [T.unpack ghcPkgCmdPath]) 112 | 113 | relativePaths :: FilePath -> PackageInfo -> [IO.TarGroup] 114 | relativePaths basePath pInfo = 115 | [ IO.TarGroup basePath $ mempty 116 | <> pInfo.libs 117 | <> [pInfo.packageDir] 118 | , IO.TarGroup basePath $ mempty 119 | <> ([pInfo.confPath] & L.filter (\c -> c.tag == Present) <&> (.value)) 120 | ] 121 | 122 | getPackages :: FilePath -> Z.PlanJson -> IO [PackageInfo] 123 | getPackages basePath planJson = forM packages (mkPackageInfo basePath compilerId') 124 | where compilerId' :: Text 125 | compilerId' = planJson.compilerId 126 | packages :: [Z.Package] 127 | packages = planJson.installPlan 128 |
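-- An illustrative evaluation of relativePaths above (package values are
-- hypothetical): the first TarGroup carries the libraries and the package
-- directory, the second carries the .conf file when it is present:
--
-- >>> relativePaths "/store" pkgInfo
-- [ TarGroup "/store" ["ghc-9.6.6/lib/libHSfoo-1.0-abc.so","ghc-9.6.6/foo-1.0-abc"]
-- , TarGroup "/store" ["ghc-9.6.6/package.db/foo-1.0-abc.conf"] ]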
129 | loadPlan :: () 130 | => r <: Error DecodeError 131 | => r <: IOE 132 | => FilePath 133 | -> Eff r Z.PlanJson 134 | loadPlan resolvedBuildPath = do 135 | lbs <- liftIO (LBS.readFile (resolvedBuildPath </> "cache" </> "plan.json")) 136 | a <- eitherDecode lbs 137 | & onLeft (throw . DecodeError . T.pack) 138 | 139 | pure do a :: Z.PlanJson 140 | 141 | ------------------------------------------------------------------------------- 142 | mkPackageInfo :: FilePath -> Z.CompilerId -> Z.Package -> IO PackageInfo 143 | mkPackageInfo basePath cid pkg = do 144 | let pid = pkg.id 145 | let compilerPath = basePath </> T.unpack cid 146 | let relativeConfPath = T.unpack cid </> "package.db" </> T.unpack pid <.> ".conf" 147 | let absoluteConfPath = basePath </> relativeConfPath 148 | let libPath = compilerPath </> "lib" 149 | let relativeLibPath = T.unpack cid </> "lib" 150 | let libPrefix = "libHS" <> pid 151 | absoluteConfPathExists <- IO.doesFileExist absoluteConfPath 152 | libFiles <- getLibFiles relativeLibPath libPath libPrefix 153 | return PackageInfo 154 | { compilerId = cid 155 | , packageId = pid 156 | , packageName = pkg.name 157 | , packageDir = T.unpack cid </> T.unpack pid 158 | , confPath = Tagged relativeConfPath (bool Absent Present absoluteConfPathExists) 159 | , libs = libFiles 160 | } 161 | 162 | getLibFiles :: FilePath -> FilePath -> Text -> IO [Library] 163 | getLibFiles relativeLibPath libPath libPrefix = do 164 | libExists <- IO.doesDirectoryExist libPath 165 | if libExists 166 | then fmap (relativeLibPath </>) . L.filter (L.isPrefixOf (T.unpack libPrefix)) <$> IO.listDirectory libPath 167 | else pure [] 168 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Data/List.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Data.List 2 | ( tuple2ToDL, 3 | tuple2ToList, 4 | tuple2ToNel, 5 | ) where 6 | 7 | import Data.List.NonEmpty (NonEmpty(..)) 8 | import HaskellWorks.Prelude 9 | 10 | tuple2ToDL :: (a, a) -> [a] -> [a] 11 | tuple2ToDL (a, b) = (a:) .
(b:) 12 | 13 | tuple2ToList :: (a, a) -> [a] 14 | tuple2ToList ab = tuple2ToDL ab [] 15 | 16 | tuple2ToNel :: (a, a) -> NonEmpty a 17 | tuple2ToNel (a, b) = a :| [b] 18 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Error.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Error 2 | ( DecodeError(DecodeError), 3 | ExitFailure(ExitFailure), 4 | CopyFailed(CopyFailed), 5 | InvalidUrl(InvalidUrl), 6 | NotFound(NotFound), 7 | NotImplemented(NotImplemented), 8 | UnsupportedUri(UnsupportedUri), 9 | ) where 10 | 11 | import HaskellWorks.Prelude 12 | import Network.URI (URI) 13 | 14 | newtype DecodeError = DecodeError Text deriving (Eq, Show, Generic) 15 | 16 | data ExitFailure = ExitFailure deriving (Eq, Show, Generic) 17 | 18 | data CopyFailed = CopyFailed deriving (Eq, Show, Generic) 19 | 20 | data InvalidUrl = InvalidUrl 21 | { url :: Text 22 | , reason :: Text 23 | } deriving (Eq, Show, Generic) 24 | 25 | data NotFound = NotFound deriving (Eq, Show, Generic) 26 | 27 | newtype NotImplemented = NotImplemented Text deriving (Eq, Show, Generic) 28 | 29 | data UnsupportedUri = UnsupportedUri 30 | { uri :: URI 31 | , reason :: Text 32 | } deriving (Eq, Show, Generic) 33 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Exit.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Exit 2 | ( catchAndExitFailure, 3 | ) where 4 | 5 | import Effectful 6 | import Effectful.Zoo.Core 7 | import Effectful.Zoo.Core.Error.Static 8 | import Effectful.Zoo.Log.Api 9 | import Effectful.Zoo.Log.Dynamic 10 | import HaskellWorks.Prelude 11 | 12 | import qualified System.Exit as IO 13 | 14 | catchAndExitFailure :: forall e a r. 
() 15 | => Show e 16 | => r <: IOE 17 | => r <: Log Text 18 | => Eff (Error e : r) a 19 | -> Eff r a 20 | catchAndExitFailure f = 21 | f & trap @e \e -> do 22 | crit $ "Error: " <> tshow e 23 | liftIO IO.exitFailure 24 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/GhcPkg.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.GhcPkg 2 | ( system, 3 | runGhcPkg, 4 | testAvailability, 5 | recache, 6 | contextInit, 7 | ) where 8 | 9 | import HaskellWorks.Prelude 10 | import HaskellWorks.Unsafe 11 | import System.Exit (ExitCode (..), exitWith) 12 | import System.Process (waitForProcess) 13 | 14 | import qualified HaskellWorks.CabalCache.Types as Z 15 | import qualified System.IO as IO 16 | import qualified System.Process as IO 17 | 18 | system :: [String] -> IO IO.ProcessHandle 19 | system (cmd:args) = IO.spawnProcess cmd args 20 | system [] = error "No command supplied" -- TODO Better error handling 21 | 22 | runGhcPkg :: Z.CompilerContext -> [String] -> IO () 23 | runGhcPkg cc params = do 24 | hGhcPkg2 <- system (cc.ghcPkgCmd <> params) 25 | exitCodeGhcPkg2 <- waitForProcess hGhcPkg2 26 | case exitCodeGhcPkg2 of 27 | ExitFailure _ -> do 28 | IO.hPutStrLn IO.stderr "ERROR: ghc-pkg command failed" 29 | exitWith (ExitFailure 1) 30 | _ -> return () 31 | 32 | testAvailability :: Z.CompilerContext -> IO () 33 | testAvailability cc = runGhcPkg cc ["--version"] 34 | 35 | recache :: Z.CompilerContext -> FilePath -> IO () 36 | recache cc packageDb = runGhcPkg cc ["recache", "--package-db", packageDb] 37 | 38 | contextInit :: Z.CompilerContext -> FilePath -> IO () 39 | contextInit cc packageDb = runGhcPkg cc ["init", packageDb] 40 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Hash.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Hash 2 | ( hashStorePath 3 | ) where 4 | 5 | import HaskellWorks.Prelude 6 | 7 | import qualified Crypto.Hash as CH 8 | import qualified Data.List as L 9 | import qualified Data.Text as T 10 | import qualified Data.Text.Encoding as T 11 | 12 | hashStorePath :: String -> String 13 | hashStorePath = L.take 10 . show . CH.hashWith CH.SHA256 . T.encodeUtf8 . T.pack 14 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/IO/Console.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.IO.Console 2 | ( putStrLn, 3 | print, 4 | hPutStrLn, 5 | hPrint, 6 | ) where 7 | 8 | import Control.Exception (bracket_) 9 | import Control.Monad.IO.Class (MonadIO, liftIO) 10 | import Data.Text (Text) 11 | import Prelude (IO, Show (..), ($), (.)) 12 | 13 | import qualified Control.Concurrent.QSem as IO 14 | import qualified Data.Text.IO as T 15 | import qualified System.IO as IO 16 | import qualified System.IO.Unsafe as IO 17 | 18 | sem :: IO.QSem 19 | sem = IO.unsafePerformIO $ IO.newQSem 1 20 | {-# NOINLINE sem #-} 21 | 22 | consoleBracket :: IO a -> IO a 23 | consoleBracket = bracket_ (IO.waitQSem sem) (IO.signalQSem sem) 24 | 25 | putStrLn :: MonadIO m => Text -> m () 26 | putStrLn = liftIO . consoleBracket . T.putStrLn 27 | 28 | print :: (MonadIO m, Show a) => a -> m () 29 | print = liftIO . consoleBracket . IO.print 30 | 31 | hPutStrLn :: MonadIO m => IO.Handle -> Text -> m () 32 | hPutStrLn h = liftIO .
consoleBracket . T.hPutStrLn h 33 | 34 | hPrint :: (MonadIO m, Show a) => IO.Handle -> a -> m () 35 | hPrint h = liftIO . consoleBracket . IO.hPrint h 36 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/IO/File.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.IO.File 2 | ( copyDirectoryRecursive, 3 | listMaybeDirectory, 4 | ) where 5 | 6 | import HaskellWorks.Prelude 7 | import Effectful 8 | import Effectful.Zoo.Console.Dynamic 9 | import Effectful.Zoo.Core 10 | import Effectful.Zoo.Core.Error.Static 11 | import Data.Text qualified as T 12 | import System.Directory qualified as IO 13 | import System.Exit qualified as IO 14 | import System.Process qualified as IO 15 | 16 | copyDirectoryRecursive :: () 17 | => r <: IOE 18 | => r <: Console Text 19 | => r <: Error String 20 | => String 21 | -> String 22 | -> Eff r () 23 | copyDirectoryRecursive source target = do 24 | print $ "Copying recursively from " <> T.pack source <> " to " <> T.pack target 25 | process <- liftIO $ IO.spawnProcess "cp" ["-r", source, target] 26 | exitCode <- liftIO $ IO.waitForProcess process 27 | case exitCode of 28 | IO.ExitSuccess -> return () 29 | IO.ExitFailure n -> throw $ "cp exited with " <> show n 30 | 31 | listMaybeDirectory :: MonadIO m => FilePath -> m [FilePath] 32 | listMaybeDirectory filepath = do 33 | exists <- liftIO $ IO.doesDirectoryExist filepath 34 | if exists 35 | then liftIO $ IO.listDirectory filepath 36 | else return [] 37 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/IO/Lazy.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.IO.Lazy 2 | ( readResource, 3 | readFirstAvailableResource, 4 | resourceExists, 5 | writeResource, 6 | createLocalDirectoryIfMissing, 7 | linkOrCopyResource, 8 | readHttpUri, 9 | removePathRecursive, 10 | ) where 11 | 12 | import Data.Generics.Product.Any (HasAny(the)) 13 | import Data.List.NonEmpty (NonEmpty ((:|))) 14 | import Effectful 15 | import Effectful.Resource 16 | import Effectful.Zoo.Amazonka.Data.AwsEnv 17 | import Effectful.Zoo.Amazonka.Data.AwsError 18 | import Effectful.Zoo.Amazonka.Data.AwsLogEntry 19 | import Effectful.Zoo.Core 20 | import Effectful.Zoo.Core.Error.Static 21 | import Effectful.Zoo.Core.Exception 22 | import Effectful.Zoo.DataLog.Dynamic 23 | import Effectful.Zoo.Lazy.Dynamic 24 | import HaskellWorks.CabalCache.AppError (AwsStatusError(..), HttpError(..), statusCodeOf) 25 | import HaskellWorks.CabalCache.Error (CopyFailed(..), InvalidUrl(..), NotFound(..), NotImplemented(..), UnsupportedUri(..)) 26 | import HaskellWorks.CabalCache.Location (Location (..)) 27 | import HaskellWorks.Prelude 28 | import Lens.Micro 29 | import Network.URI (URI) 30 | 31 | import qualified Control.Concurrent as IO 32 | import qualified Data.ByteString.Lazy as LBS 33 | import qualified Data.List.NonEmpty as NEL 34 | import qualified Data.Text as T 35 | import qualified HaskellWorks.CabalCache.AWS.S3 as S3 36 | import qualified HaskellWorks.CabalCache.IO.Console as CIO 37 | import qualified HaskellWorks.CabalCache.URI as URI 38 | import qualified Network.HTTP.Client as HTTP 39 | import qualified Network.HTTP.Client.TLS as HTTPS 40 | import qualified System.Directory as IO 41 | import qualified System.FilePath.Posix as FP 42 | import qualified System.IO as IO 43 | import qualified
System.IO.Error as IO 44 | 45 | {- HLINT ignore "Redundant do" -} 46 | {- HLINT ignore "Reduce duplication" -} 47 | {- HLINT ignore "Redundant bracket" -} 48 | 49 | handleHttpError :: () 50 | => r <: Error HttpError 51 | => r <: Error InvalidUrl 52 | => Eff r a 53 | -> Eff r a 54 | handleHttpError f = catchIO f $ \(e :: HTTP.HttpException) -> 55 | case e of 56 | HTTP.HttpExceptionRequest request content' -> throw $ HttpError request content' 57 | HTTP.InvalidUrlException url' reason' -> throw $ InvalidUrl (tshow url') (tshow reason') 58 | 59 | readResource :: () 60 | => r <: DataLog AwsLogEntry 61 | => r <: Error AwsError 62 | => r <: Error AwsStatusError 63 | => r <: Error HttpError 64 | => r <: Error InvalidUrl 65 | => r <: Error NotFound 66 | => r <: Error UnsupportedUri 67 | => r <: IOE 68 | => r <: Lazy AwsEnv 69 | => r <: Resource 70 | => Int 71 | -> Location 72 | -> Eff r LBS.ByteString 73 | readResource maxRetries = \case 74 | LocalFile path -> do 75 | fileExists <- liftIO $ IO.doesFileExist path 76 | if fileExists 77 | then liftIO $ LBS.readFile path 78 | else throw NotFound 79 | Uri uri -> retryS3 maxRetries $ case uri ^. the @"uriScheme" of 80 | "s3:" -> S3.getS3Uri (URI.reslashUri uri) 81 | "http:" -> readHttpUri (URI.reslashUri uri) 82 | "https:" -> readHttpUri (URI.reslashUri uri) 83 | scheme -> throw $ UnsupportedUri uri $ "Unrecognised uri scheme: " <> T.pack scheme 84 | 85 | readFirstAvailableResource :: () 86 | => r <: DataLog AwsLogEntry 87 | => r <: Error AwsError 88 | => r <: Error AwsStatusError 89 | => r <: Error HttpError 90 | => r <: Error InvalidUrl 91 | => r <: Error NotFound 92 | => r <: Error UnsupportedUri 93 | => r <: IOE 94 | => r <: Lazy AwsEnv 95 | => r <: Resource 96 | => NonEmpty Location 97 | -> Int 98 | -> Eff r (LBS.ByteString, Location) 99 | readFirstAvailableResource (a:|as) maxRetries = do 100 | (, a) <$> readResource maxRetries a 101 | & do trap @NotFound \e -> do 102 | case NEL.nonEmpty as of 103 | Nothing -> throw e 104 | Just nas -> readFirstAvailableResource nas maxRetries 105 | & do trap @AwsStatusError \e -> do 106 | case NEL.nonEmpty as of 107 | Nothing -> throw e 108 | Just nas -> readFirstAvailableResource nas maxRetries 109 | & do trap @HttpError \e -> do 110 | case NEL.nonEmpty as of 111 | Nothing -> throw e 112 | Just nas -> readFirstAvailableResource nas maxRetries 113 | 114 | safePathIsSymbolLink :: () 115 | => r <: IOE 116 | => FilePath 117 | -> Eff r Bool 118 | safePathIsSymbolLink filePath = 119 | catchIO (liftIO $ IO.pathIsSymbolicLink filePath) handler 120 | where handler :: IOError -> Eff r Bool 121 | handler e = if IO.isDoesNotExistError e 122 | then return False 123 | else return True 124 | 125 | resourceExists :: () 126 | => r <: DataLog AwsLogEntry 127 | => r <: Error AwsError 128 | => r <: Error AwsStatusError 129 | => r <: Error InvalidUrl 130 | => r <: Error UnsupportedUri 131 | => r <: IOE 132 | => r <: Lazy AwsEnv 133 | => r <: Resource 134 | => Location 135 | -> Eff r Bool 136 | resourceExists = \case 137 | LocalFile path -> do 138 | fileExists <- liftIO $ IO.doesFileExist path 139 | if fileExists 140 | then return True 141 | else do 142 | symbolicLinkExists <- safePathIsSymbolLink path 143 | if symbolicLinkExists 144 | then do 145 | target <- liftIO $ IO.getSymbolicLinkTarget path 146 | resourceExists (LocalFile target) 147 | else return False 148 | Uri uri -> case uri ^. 
the @"uriScheme" of 149 | "s3:" -> do 150 | (True <$ S3.headS3Uri (URI.reslashUri uri)) 151 | & trap_ @AwsStatusError (pure False) 152 | -- & trap_ @HttpError (pure False) 153 | "http:" -> do 154 | (True <$ headHttpUri (URI.reslashUri uri)) 155 | & trap_ @HttpError (pure False) 156 | & trap_ @AwsStatusError (pure False) 157 | _scheme -> return False 158 | 159 | writeResource :: () 160 | => r <: DataLog AwsLogEntry 161 | => r <: Error AwsError 162 | => r <: Error AwsStatusError 163 | => r <: Error HttpError 164 | => r <: Error NotImplemented 165 | => r <: Error UnsupportedUri 166 | => r <: IOE 167 | => r <: Lazy AwsEnv 168 | => r <: Resource 169 | => Location 170 | -> Int 171 | -> LBS.ByteString 172 | -> Eff r () 173 | writeResource loc maxRetries lbs = case loc of 174 | LocalFile path -> liftIO (LBS.writeFile path lbs) 175 | Uri uri' -> retryS3 maxRetries $ case uri' ^. the @"uriScheme" of 176 | "s3:" -> S3.putObject (URI.reslashUri uri') lbs 177 | "http:" -> throw $ NotImplemented "HTTP PUT method not supported" 178 | scheme -> throw $ UnsupportedUri uri' $ "Unrecognised uri scheme: " <> T.pack scheme 179 | 180 | createLocalDirectoryIfMissing :: MonadIO m => Location -> m () 181 | createLocalDirectoryIfMissing = \case 182 | LocalFile path -> liftIO $ IO.createDirectoryIfMissing True path 183 | Uri uri -> case uri ^. the @"uriScheme" of 184 | "s3:" -> return () 185 | "http:" -> return () 186 | _scheme -> return () 187 | 188 | retryWhen :: () 189 | => r <: Error x 190 | => r <: IOE 191 | => Show x 192 | => (x -> Bool) 193 | -> Int 194 | -> Eff r a 195 | -> Eff r a 196 | retryWhen p n f = f 197 | & do trapIn \exception -> do 198 | if n > 0 199 | then do 200 | if p exception 201 | then do 202 | liftIO $ CIO.hPutStrLn IO.stderr $ "WARNING: " <> tshow exception <> " (retrying)" 203 | liftIO $ IO.threadDelay 1000000 204 | retryWhen p (n - 1) f 205 | else throw exception 206 | else throw exception 207 | 208 | retryUnless :: forall x r a. () 209 | => Show x 210 | => r <: Error x 211 | => r <: IOE 212 | => (x -> Bool) 213 | -> Int 214 | -> Eff r a 215 | -> Eff r a 216 | retryUnless p = retryWhen (not . 
p) 217 | 218 | retryS3 :: () 219 | => r <: Error AwsStatusError 220 | => r <: IOE 221 | => Int 222 | -> Eff r a 223 | -> Eff r a 224 | retryS3 maxRetries a = do 225 | retryWhen retryPredicate maxRetries a 226 | where retryPredicate :: AwsStatusError -> Bool 227 | retryPredicate e = statusCodeOf e `elem` retryableHTTPStatuses 228 | 229 | -- https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#ErrorCodeList 230 | -- https://stackoverflow.com/a/51770411/2976251 231 | -- another note: linode rate limiting returns 503 232 | retryableHTTPStatuses :: [Int] 233 | retryableHTTPStatuses = [408, 409, 425, 426, 502, 503, 504] 234 | 235 | linkOrCopyResource :: () 236 | => r <: DataLog AwsLogEntry 237 | => r <: Error AwsError 238 | => r <: Error AwsStatusError 239 | => r <: Error CopyFailed 240 | => r <: Error NotImplemented 241 | => r <: Error UnsupportedUri 242 | => r <: IOE 243 | => r <: Lazy AwsEnv 244 | => r <: Resource 245 | => Location 246 | -> Location 247 | -> Eff r () 248 | linkOrCopyResource source target = case source of 249 | LocalFile sourcePath -> case target of 250 | LocalFile targetPath -> do 251 | liftIO $ IO.createDirectoryIfMissing True (FP.takeDirectory targetPath) 252 | targetPathExists <- liftIO $ IO.doesFileExist targetPath 253 | unless targetPathExists $ liftIO $ IO.createFileLink sourcePath targetPath 254 | Uri _ -> throw $ NotImplemented "Can't copy between different file backends" 255 | Uri sourceUri -> case target of 256 | LocalFile _targetPath -> throw $ NotImplemented "Can't copy between different file backends" 257 | Uri targetUri -> case (sourceUri ^. the @"uriScheme", targetUri ^. the @"uriScheme") of 258 | ("s3:", "s3:") -> retryUnless @AwsStatusError ((== 301) . statusCodeOf) 3 (S3.copyS3Uri (URI.reslashUri sourceUri) (URI.reslashUri targetUri)) 259 | ("http:", "http:") -> throw $ NotImplemented "Link and copy unsupported for http backend" 260 | (sourceScheme, targetScheme) -> throw $ NotImplemented $ "Unsupported backend combination: " <> T.pack sourceScheme <> " to " <> T.pack targetScheme 261 | 262 | readHttpUri :: () 263 | => r <: Error HttpError 264 | => r <: Error InvalidUrl 265 | => r <: IOE 266 | => URI 267 | -> Eff r LBS.ByteString 268 | readHttpUri httpUri = handleHttpError do 269 | manager <- liftIO $ HTTP.newManager HTTPS.tlsManagerSettings 270 | request <- liftIO $ HTTP.parseUrlThrow (T.unpack ("GET " <> tshow (URI.reslashUri httpUri))) 271 | response <- liftIO $ HTTP.httpLbs request manager 272 | 273 | return $ HTTP.responseBody response 274 | 275 | headHttpUri :: () 276 | => r <: Error HttpError 277 | => r <: Error InvalidUrl 278 | => r <: IOE 279 | => URI 280 | -> Eff r LBS.ByteString 281 | headHttpUri httpUri = handleHttpError do 282 | manager <- liftIO $ HTTP.newManager HTTP.defaultManagerSettings 283 | request <- liftIO $ HTTP.parseUrlThrow (T.unpack ("HEAD " <> tshow (URI.reslashUri httpUri))) 284 | response <- liftIO $ HTTP.httpLbs request manager 285 | 286 | return $ HTTP.responseBody response 287 | 288 | removePathRecursive :: () 289 | => r <: IOE 290 | => [Char] 291 | -> Eff r () 292 | removePathRecursive pkgStorePath = 293 | liftIO (IO.removeDirectoryRecursive pkgStorePath) 294 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/IO/Tar.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.IO.Tar 2 | ( ArchiveError(..), 3 | TarGroup(..), 4 | createTar, 5 | extractTar, 6 | ) where 7 | 8 | import 
Control.DeepSeq (NFData) 9 | import Effectful 10 | import Effectful.Zoo.Core.Error.Static 11 | import Effectful.Zoo.Core 12 | import HaskellWorks.Prelude 13 | 14 | import qualified System.Exit as IO 15 | import qualified System.Process as IO 16 | 17 | newtype ArchiveError = ArchiveError Text deriving (Eq, Show, Generic) 18 | 19 | data TarGroup = TarGroup 20 | { basePath :: FilePath 21 | , entryPaths :: [FilePath] 22 | } deriving (Show, Eq, Generic, NFData) 23 | 24 | createTar :: () 25 | => r <: Error ArchiveError 26 | => r <: IOE 27 | => Foldable t 28 | => [Char] 29 | -> t TarGroup 30 | -> Eff r () 31 | createTar tarFile groups = do 32 | let args = ["-zcf", tarFile] <> foldMap tarGroupToArgs groups 33 | process <- liftIO $ IO.spawnProcess "tar" args 34 | exitCode <- liftIO $ IO.waitForProcess process 35 | case exitCode of 36 | IO.ExitSuccess -> return () 37 | IO.ExitFailure n -> throw $ ArchiveError $ "Failed to create tar. Exit code: " <> tshow n 38 | 39 | extractTar :: () 40 | => r <: Error ArchiveError 41 | => r <: IOE 42 | => String 43 | -> String 44 | -> Eff r () 45 | extractTar tarFile targetPath = do 46 | process <- liftIO $ IO.spawnProcess "tar" ["-C", targetPath, "-zxf", tarFile] 47 | exitCode <- liftIO $ IO.waitForProcess process 48 | case exitCode of 49 | IO.ExitSuccess -> return () 50 | IO.ExitFailure n -> throw $ ArchiveError $ "Failed to extract tar. Exit code: " <> tshow n 51 | 52 | tarGroupToArgs :: TarGroup -> [String] 53 | tarGroupToArgs tarGroup = ["-C", tarGroup.basePath] <> tarGroup.entryPaths 54 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Location.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Location 2 | ( IsPath(..) 3 | , Location(..) 4 | , toLocation 5 | ) 6 | where 7 | 8 | import Data.Generics.Product.Any (HasAny(the)) 9 | import HaskellWorks.CabalCache.AWS.S3.URI (S3Uri (..)) 10 | import HaskellWorks.Prelude 11 | import Lens.Micro 12 | import Network.URI (URI) 13 | 14 | import qualified Amazonka.Data as AWS 15 | import qualified Amazonka.S3 as AWS 16 | import qualified Data.Text as T 17 | import qualified Network.URI as URI 18 | import qualified System.FilePath as FP 19 | 20 | class IsPath a s | a -> s where 21 | (</>) :: a -> s -> a 22 | (<.>) :: a -> s -> a 23 | 24 | infixr 5 </> 25 | infixr 7 <.> 26 | 27 | data Location 28 | = Uri URI 29 | | LocalFile FilePath 30 | deriving (Show, Eq, Generic) 31 | 32 | instance AWS.ToText Location where 33 | toText (Uri uri) = tshow uri 34 | toText (LocalFile p) = T.pack p 35 | 36 | instance IsPath Location Text where 37 | Uri b </> p = Uri (b </> p) 38 | LocalFile b </> p = LocalFile (b </> T.unpack p) 39 | 40 | Uri b <.> e = Uri (b <.> e) 41 | LocalFile b <.> e = LocalFile (b <.> T.unpack e) 42 | 43 | instance IsPath Text Text where 44 | b </> p = T.pack (T.unpack b FP.</> T.unpack p) 45 | b <.> e = T.pack (T.unpack b FP.<.> T.unpack e) 46 | 47 | instance IsPath URI Text where 48 | b </> p = b & the @"uriPath" %~ (<> "/" <> T.unpack p) 49 | b <.> e = b & the @"uriPath" %~ (<> "." <> T.unpack e) 50 | 51 | instance (a ~ Char) => IsPath [a] [a] where 52 | b </> p = b FP.</> p 53 | b <.> e = b FP.<.> e 54 | 55 | instance IsPath S3Uri Text where 56 | S3Uri b (AWS.ObjectKey k) </> p = 57 | S3Uri b (AWS.ObjectKey (stripEnd "/" k <> "/" <> stripStart "/" p)) 58 | 59 | S3Uri b (AWS.ObjectKey k) <.> e = 60 | S3Uri b (AWS.ObjectKey (stripEnd "." k <> "." <> stripStart "." e))
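-- Doctest-style examples of the path operators above (illustrative; shown
-- with a POSIX path separator, S3Uri literals via OverloadedStrings):
--
-- >>> LocalFile "dist-newstyle" </> "cache" <.> "json"
-- LocalFile "dist-newstyle/cache.json"
-- >>> S3Uri "my-bucket" "root/" </> "plan.json"
-- S3Uri {bucket = BucketName "my-bucket", objectKey = ObjectKey "root/plan.json"}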
/src/HaskellWorks/CabalCache/Metadata.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Metadata 2 | ( metaDir, 3 | createMetadata, 4 | loadMetadata, 5 | deleteMetadata, 6 | ) where 7 | 8 | import HaskellWorks.CabalCache.Core (PackageInfo (..)) 9 | import HaskellWorks.CabalCache.IO.Tar (TarGroup (..)) 10 | import HaskellWorks.Prelude 11 | import System.FilePath (takeFileName, (</>)) 12 | 13 | import qualified Data.ByteString.Lazy as LBS 14 | import qualified Data.Map.Strict as Map 15 | import qualified Data.Text as T 16 | import qualified System.Directory as IO 17 | 18 | metaDir :: String 19 | metaDir = "_CC_METADATA" 20 | 21 | createMetadata :: MonadIO m => FilePath -> PackageInfo -> [(T.Text, LBS.ByteString)] -> m TarGroup 22 | createMetadata storePath pkg values = liftIO do 23 | let pkgMetaPath = storePath </> pkg.packageDir </> metaDir 24 | IO.createDirectoryIfMissing True pkgMetaPath 25 | forM_ values $ \(k, v) -> LBS.writeFile (pkgMetaPath </> T.unpack k) v 26 | pure $ TarGroup storePath [pkg.packageDir </> metaDir] 27 | 28 | loadMetadata :: MonadIO m => FilePath -> m (Map.Map T.Text LBS.ByteString) 29 | loadMetadata pkgStorePath = liftIO do 30 | let pkgMetaPath = pkgStorePath </> metaDir 31 | exists <- IO.doesDirectoryExist pkgMetaPath 32 | if not exists 33 | then pure Map.empty 34 | else IO.listDirectory pkgMetaPath 35 | <&> fmap (pkgMetaPath </>) 36 | >>= traverse (\mfile -> (T.pack (takeFileName mfile),) <$> LBS.readFile mfile) 37 | <&> Map.fromList 38 | 39 | deleteMetadata :: MonadIO m => FilePath -> m () 40 | deleteMetadata pkgStorePath = 41 | liftIO $ IO.removeDirectoryRecursive (pkgStorePath </> metaDir) 42 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Options.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Options 2 | ( readOrFromTextOption, 3 | ) where 4 | 5 | import Amazonka.Data.Text (FromText (..), fromText) 6 | import HaskellWorks.Prelude 7 | import Options.Applicative (Parser, Mod, OptionFields) 8 | 9 | import qualified Data.Text as T 10 | import qualified Options.Applicative as OA 11 | 12 | orElse :: Either e a -> Either e a -> Either e a 13 | orElse a b = 14 | either (const b) Right a 15 | 16 | readOrFromTextOption :: (Read a, FromText a) => Mod OptionFields a -> Parser a 17 | readOrFromTextOption = 18 | let fromStr s = readEither s `orElse` fromText (T.pack s) 19 | in OA.option $ OA.eitherReader fromStr 20 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Store.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Store 2 | ( cleanupStorePath, 3 | ) where 4 | 5 | import Effectful 6 | import Effectful.Zoo.Core 7 | import HaskellWorks.Prelude 8 | 9 | import qualified HaskellWorks.CabalCache.IO.Lazy as IO 10 
| import qualified System.Directory as IO 11 | 12 | cleanupStorePath :: () 13 | => r <: IOE 14 | => FilePath 15 | -> Eff r () 16 | cleanupStorePath packageStorePath = do 17 | pathExists <- liftIO $ IO.doesPathExist packageStorePath 18 | when pathExists $ void $ IO.removePathRecursive packageStorePath 19 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Text.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Text 2 | ( maybeStripPrefix, 3 | ) where 4 | 5 | import Data.Maybe (fromMaybe) 6 | import Data.Text (Text) 7 | 8 | import qualified Data.Text as T 9 | 10 | maybeStripPrefix :: Text -> Text -> Text 11 | maybeStripPrefix prefix text = fromMaybe text (T.stripPrefix prefix text) 12 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Topology.hs: -------------------------------------------------------------------------------- 1 | {- HLINT ignore "Functor law" -} 2 | 3 | module HaskellWorks.CabalCache.Topology 4 | ( PlanData(..), 5 | buildPlanData, 6 | canShare, 7 | ) where 8 | 9 | import Control.Arrow ((&&&)) 10 | import Data.Generics.Product.Any (the) 11 | import Data.Map.Strict (Map) 12 | import Data.Set (Set) 13 | import HaskellWorks.CabalCache.Types 14 | import HaskellWorks.Prelude 15 | import HaskellWorks.Unsafe 16 | import Lens.Micro.Extras (view) 17 | 18 | import qualified Data.List as L 19 | import qualified Data.Map.Strict as M 20 | import qualified Data.Set as S 21 | import qualified Topograph as TG 22 | 23 | newtype PlanData = PlanData 24 | { nonShareable :: Set PackageId 25 | } deriving Generic 26 | 27 | buildPlanData :: PlanJson -- ^ The original plan 28 | -> [PackageId] -- ^ Packages that are known to be non-shareable 29 | -> PlanData -- ^ Updated plan 30 | buildPlanData plan nonShareablePkgs = 31 | let dm = dependenciesMap plan.installPlan 32 | in buildPlanData' dm nonShareablePkgs 33 | 34 | canShare :: PlanData -> PackageId -> Bool 35 | canShare planData pkgId = S.notMember pkgId planData.nonShareable 36 | 37 | ------------------------------------------------------------------------------- 38 | 39 | dependenciesMap :: [Package] -> Map PackageId (Set PackageId) 40 | dependenciesMap plan = plan 41 | <&> (view (the @"id") &&& view (the @"depends")) 42 | <&> fmap S.fromList & M.fromList 43 | 44 | buildPlanData' :: Map PackageId (Set PackageId) -- ^ Dependencies map 45 | -> [PackageId] -- ^ Packages to exclude 46 | -> PlanData -- ^ All package ids to exclude 47 | buildPlanData' plan knownNonShareable = 48 | fromRight (error "Could not process dependencies") $ 49 | TG.runG plan $ \g -> 50 | let tg = TG.transpose g 51 | nsPaths = L.concatMap (fromMaybe [] . paths tg) knownNonShareable 52 | nsAll = S.fromList (join nsPaths) 53 | in PlanData { nonShareable = nsAll } 54 | where paths g x = (fmap . fmap . 
fmap) (TG.gFromVertex g) $ TG.dfs g <$> TG.gToVertex g x 55 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/Types.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Types 2 | ( CompilerId, 3 | PackageId, 4 | PackageName, 5 | CompilerContext(..), 6 | Components(..), 7 | PlanJson(..), 8 | Package(..), 9 | Lib(..), 10 | ) where 11 | 12 | import Data.Aeson (FromJSON(parseJSON), (.!=), (.:), (.:?)) 13 | import Data.Text (Text) 14 | import GHC.Generics (Generic) 15 | import Prelude hiding (id) 16 | 17 | import qualified Data.Aeson as J 18 | 19 | type CompilerId = Text 20 | type PackageId = Text 21 | type PackageName = Text 22 | 23 | data PlanJson = PlanJson 24 | { compilerId :: CompilerId 25 | , installPlan :: [Package] 26 | } deriving (Eq, Show, Generic) 27 | 28 | data Package = Package 29 | { packageType :: Text 30 | , id :: PackageId 31 | , name :: Text 32 | , version :: Text 33 | , style :: Maybe Text 34 | , componentName :: Maybe Text 35 | , components :: Maybe Components 36 | , depends :: [Text] 37 | , exeDepends :: [Text] 38 | } deriving (Eq, Show, Generic) 39 | 40 | newtype Components = Components 41 | { lib :: Maybe Lib 42 | } deriving (Eq, Show, Generic) 43 | 44 | data Lib = Lib 45 | { depends :: [Text] 46 | , exeDepends :: [Text] 47 | } deriving (Eq, Show, Generic) 48 | 49 | newtype CompilerContext = CompilerContext 50 | { ghcPkgCmd :: [String] 51 | } deriving (Show, Eq, Generic) 52 | 53 | instance FromJSON PlanJson where 54 | parseJSON = J.withObject "PlanJson" $ \v -> PlanJson 55 | <$> v .: "compiler-id" 56 | <*> v .: "install-plan" 57 | 58 | instance FromJSON Package where 59 | parseJSON = J.withObject "Package" $ \v -> do 60 | packageType <- v .: "type" 61 | id <- v .: "id" 62 | name <- v .: "pkg-name" 63 | version <- v .: "pkg-version" 64 | style <- v .:? "style" 65 | componentName <- v .:? "component-name" 66 | components <- v .:? "components" 67 | depends <- v .:? "depends" .!= [] 68 | exeDepends <- v .:? "exe-depends" .!= [] 69 | return Package {..} 70 | 71 | instance FromJSON Components where 72 | parseJSON = J.withObject "Components" $ \v -> Components 73 | <$> v .:? "lib" 74 | 75 | instance FromJSON Lib where 76 | parseJSON = J.withObject "Lib" $ \v -> Lib 77 | <$> v .:? "depends" .!= [] 78 | <*> v .:? "exe-depends" .!= [] 79 | -------------------------------------------------------------------------------- /src/HaskellWorks/CabalCache/URI.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.URI 2 | ( reslashUri, 3 | ) where 4 | 5 | import Data.Generics.Product.Any (HasAny(the)) 6 | import HaskellWorks.Prelude 7 | import Lens.Micro 8 | import Network.URI (URI) 9 | 10 | reslashUri :: URI -> URI 11 | reslashUri uri = uri & the @"uriPath" %~ fmap reslashChar 12 | where reslashChar :: Char -> Char 13 | reslashChar '\\' = '/' 14 | reslashChar c = c 15 | --------------------------------------------------------------------------------
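Restated as a tiny sketch (assumes import qualified Network.URI as URI, as in the modules above; values illustrative):

reslashedPath :: URI -> String
reslashedPath = URI.uriPath . reslashUri  -- e.g. a uriPath of "\\store\\pkg" becomes "/store/pkg"; all other URI components pass through unchanged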
/src/HaskellWorks/CabalCache/Version.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.Version 2 | ( archiveVersion, 3 | ) where 4 | 5 | import Data.String (IsString) 6 | 7 | archiveVersion :: IsString s => s 8 | archiveVersion = "v2" 9 | -------------------------------------------------------------------------------- /test/HaskellWorks/CabalCache/AwsSpec.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.AwsSpec 2 | ( spec 3 | ) where 4 | 5 | import Data.Generics.Product.Any 6 | import Effectful 7 | import Effectful.Concurrent 8 | import Effectful.Resource 9 | import Effectful.Environment 10 | import Effectful.Error.Static (runError) 11 | import Effectful.Zoo.Amazonka.Api.Discover 12 | import Effectful.Zoo.Amazonka.Data.AwsEnv 13 | import Effectful.Zoo.Amazonka.Data.AwsError 14 | import Effectful.Zoo.Amazonka.Data.AwsLogEntry 15 | import Effectful.Zoo.Core 16 | import Effectful.Zoo.Core.Error.Static 17 | import Effectful.Zoo.Hedgehog.Api 18 | import Effectful.Zoo.Hedgehog.Dynamic 19 | import Effectful.Zoo.HUnit 20 | import Effectful.Zoo.Lazy.Dynamic 21 | import HaskellWorks.CabalCache.AppError (AwsStatusError(..)) 22 | import HaskellWorks.CabalCache.Error (UnsupportedUri) 23 | import HaskellWorks.Prelude 24 | import Lens.Micro 25 | import Test.Hspec 26 | 27 | import qualified HaskellWorks.CabalCache.AWS.S3 as AWS 28 | import qualified Network.URI as URI 29 | 30 | {- HLINT ignore "Redundant do" -} 31 | {- HLINT ignore "Reduce duplication" -} 32 | {- HLINT ignore "Redundant bracket" -} 33 | 34 | data Success = Success deriving Show 35 | 36 | catchSuccess :: forall r. () 37 | => r <: Hedgehog 38 | => r <: IOE 39 | => Eff (Error Success : r) Void 40 | -> Eff r () 41 | catchSuccess f = do 42 | result <- f & runError @Success 43 | case result of 44 | Left (_, Success) -> pure () 45 | Right a -> absurd a 46 | 47 | runTestEnv :: forall a r. () 48 | => r <: IOE 49 | => Eff 50 | ( Lazy AwsEnv 51 | : Concurrent 52 | : Environment 53 | : Resource 54 | : r 55 | ) a 56 | -> Eff r a 57 | runTestEnv f = 58 | f 59 | & runLazy discoverAwsEnv 60 | & runConcurrent 61 | & runEnvironment 62 | & runResource 63 | 64 | spec :: Spec 65 | spec = describe "HaskellWorks.CabalCache.AwsSpec" do 66 | it "stub" $ requireTest $ hedgehog $ runTestEnv $ catchSuccess $ do 67 | uri <- URI.parseURI "s3://cache.haskellworks.io/test/cabal-cache/ci" 68 | & onNothingFail 69 | 70 | void (AWS.headS3Uri uri) 71 | & jotShowDataLog @AwsLogEntry 72 | & do trap @AwsStatusError \e -> do 73 | assert $ e.status ^. the @"statusCode" == 404 || e.status ^. the @"statusCode" == 301 74 | throw Success 75 | & do trap_ @AwsError failure 76 | & do trap_ @AwsStatusError failure 77 | & do trap_ @UnsupportedUri failure 78 | 79 | failure 80 | -------------------------------------------------------------------------------- /test/HaskellWorks/CabalCache/IntegrationSpec.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.IntegrationSpec 2 | ( spec 3 | ) where 4 | 5 | import HaskellWorks.Prelude 6 | import System.FilePath ((</>)) 7 | import Test.Hspec (Spec, describe, it) 8 | 9 | import qualified Data.List as L 10 | import qualified Data.Time.Clock as DT 11 | import qualified Data.Time.Format as DT 12 | import qualified HaskellWorks.Hspec.Hedgehog as H 13 | import qualified Hedgehog as H 14 | import qualified Hedgehog.Extras.Test.Base as H 15 | import qualified System.Directory as IO 16 | import qualified System.Environment as IO 17 | import qualified System.FilePath.Glob as IO 18 | import qualified Test.Base as H 19 | 20 | {- HLINT ignore "Redundant do" -} 21 | {- HLINT ignore "Reduce duplication" -} 22 | {- HLINT ignore "Redundant bracket" -} 23 | 24 | spec :: Spec 25 | spec = describe "HaskellWorks.CabalCache.IntegrationSpec" do 26 | it "local" $ H.require . H.withTests 1 $ H.integration $ H.runFinallies . H.workspace "local" $ \tempAbsBasePath' -> do 27 | cwd <- H.noteIO IO.getCurrentDirectory 28 | let archivePath = tempAbsBasePath' </> "archive" 29 | let storePath = tempAbsBasePath' </> "store" 30 | let buildPath = cwd </> "dist-newstyle" 31 | 32 | H.execCabalCache_ 33 | [ "sync-to-archive" 34 | , "--archive-uri" 35 | , archivePath 36 | , "--build-path" 37 | , buildPath 38 | ] 39 | 40 | H.execCabalCache_ 41 | [ "sync-from-archive" 42 | , "--archive-uri" 43 | , archivePath 44 | , "--store-path" 45 | , tempAbsBasePath' </> "store" 46 | , "--build-path" 47 | , buildPath 48 | ] 49 | 50 | archivedPackages <- H.noteShowIO $ IO.globDir1 (IO.compile "**/*.tar.gz") archivePath 51 | restoredPackages <- H.noteShowIO $ IO.globDir1 (IO.compile "**/cabal-hash.txt") storePath 52 | 53 | H.assert $ L.length archivedPackages > 20 -- At least some packages should have been archived 54 | H.assert $ L.length restoredPackages > 20 -- At least some packages should have been restored 55 | 56 | it "remote" $ H.require . H.withTests 1 $ H.integration $ H.runFinallies . H.workspace "remote" $ \tempAbsBasePath' -> do 57 | cwd <- H.noteIO $ IO.getCurrentDirectory 58 | mBinaryCacheUri <- H.noteShowIO $ IO.lookupEnv "BINARY_CACHE_URI" 59 | 60 | forM_ mBinaryCacheUri \binaryCacheUri -> do 61 | now <- H.evalIO DT.getCurrentTime 62 | 63 | let formattedNow = DT.formatTime DT.defaultTimeLocale "%Y-%m-%dT%H:%M:%S" now 64 | let storePath = tempAbsBasePath' </> "store" 65 | let buildPath = cwd </> "dist-newstyle" 66 | let archiveUri = binaryCacheUri <> "/" <> "cabal-cache-test" <> "/" <> formattedNow 67 | 68 | H.execCabalCache_ 69 | [ "sync-to-archive" 70 | , "--archive-uri" 71 | , archiveUri 72 | , "--build-path" 73 | , buildPath 74 | ] 75 | 76 | H.execCabalCache_ 77 | [ "sync-from-archive" 78 | , "--archive-uri" 79 | , archiveUri 80 | , "--store-path" 81 | , tempAbsBasePath' </> "store" 82 | , "--build-path" 83 | , buildPath 84 | ] 85 | 86 | restoredPackages <- H.noteShowIO $ IO.globDir1 (IO.compile "**/cabal-hash.txt") storePath 87 | 88 | H.assert $ L.length restoredPackages > 20 -- At least some packages should have been restored 89 | --------------------------------------------------------------------------------
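The "remote" test above passes trivially when BINARY_CACHE_URI is unset, because forM_ over Nothing runs nothing. A sketch of exercising it against a real bucket (bucket name hypothetical; AWS credentials assumed to be present in the environment):

BINARY_CACHE_URI=s3://my-test-bucket/cache cabal test all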
/test/HaskellWorks/CabalCache/LocationSpec.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.LocationSpec 2 | ( spec, 3 | ) where 4 | 5 | import Data.Maybe (fromJust) 6 | import HaskellWorks.CabalCache.Location 7 | import HaskellWorks.Hspec.Hedgehog 8 | import HaskellWorks.Prelude 9 | import Hedgehog 10 | import Network.URI (URI) 11 | import Test.Hspec 12 | 13 | import qualified Amazonka.Data as AWS 14 | import qualified Data.List as L 15 | import qualified Data.List as List 16 | import qualified Data.Text as Text 17 | import qualified Hedgehog.Gen as Gen 18 | import qualified Hedgehog.Range as Range 19 | import qualified Network.URI as URI 20 | import qualified System.FilePath as FP 21 | 22 | {- HLINT ignore "Redundant do" -} 23 | {- HLINT ignore "Reduce duplication" -} 24 | {- HLINT ignore "Redundant bracket" -} 25 | 26 | s3Uri :: MonadGen m => m URI 27 | s3Uri = do 28 | let partGen = Gen.string (Range.linear 3 10) Gen.alphaNum 29 | bkt <- partGen 30 | parts <- Gen.list (Range.linear 1 5) partGen 31 | ext <- Gen.string (Range.linear 2 4) Gen.alphaNum 32 | pure $ fromJust $ URI.parseURI $ "s3://" <> bkt <> "/" <> L.intercalate "/" parts <> "." <> ext 33 | localPath :: MonadGen m => m FilePath 34 | localPath = do 35 | let partGen = Gen.string (Range.linear 3 10) Gen.alphaNum 36 | parts <- Gen.list (Range.linear 1 5) partGen 37 | ext <- Gen.string (Range.linear 2 4) Gen.alphaNum 38 | pure $ "/" <> List.intercalate "/" parts <> "." <> ext 39 | 40 | spec :: Spec 41 | spec = describe "HaskellWorks.CabalCache.LocationSpec" do 42 | it "URI bucket-only" $ requireTest do 43 | fromJust (URI.parseURI "s3://bucket") </> "directory" === fromJust (URI.parseURI "s3://bucket/directory") 44 | 45 | it "Location bucket-only" $ requireTest do 46 | fromJust (toLocation "s3://bucket") </> "directory" === fromJust (toLocation "s3://bucket/directory") 47 | 48 | it "S3 should roundtrip from and to text" $ require $ property do 49 | uri <- forAll s3Uri 50 | tripping (Uri uri) AWS.toText toLocation 51 | 52 | it "LocalLocation should roundtrip from and to text" $ require $ property do 53 | path <- forAll localPath 54 | tripping (LocalFile path) AWS.toText toLocation 55 | 56 | it "Should append s3 path" $ require $ property do 57 | loc <- Uri <$> forAll s3Uri 58 | part <- forAll $ Gen.text (Range.linear 3 10) Gen.alphaNum 59 | ext <- forAll $ Gen.text (Range.linear 2 4) Gen.alphaNum 60 | AWS.toText (loc </> part <.> ext) === AWS.toText loc <> "/" <> part <> "." <> ext 61 | 62 | it "Should append local path" $ require $ property do 63 | loc <- LocalFile <$> forAll localPath 64 | part <- forAll $ Gen.string (Range.linear 3 10) Gen.alphaNum 65 | ext <- forAll $ Gen.string (Range.linear 2 4) Gen.alphaNum 66 | AWS.toText (loc </> Text.pack part <.> Text.pack ext) === Text.pack ((Text.unpack $ AWS.toText loc) FP.</> part FP.<.> ext) 67 | -------------------------------------------------------------------------------- /test/HaskellWorks/CabalCache/QuerySpec.hs: -------------------------------------------------------------------------------- 1 | module HaskellWorks.CabalCache.QuerySpec 2 | ( spec, 3 | ) where 4 | 5 | import HaskellWorks.Hspec.Hedgehog 6 | import HaskellWorks.Prelude 7 | import Hedgehog 8 | import Test.Hspec 9 | import Text.RawString.QQ 10 | 11 | import qualified Data.Aeson as A 12 | import qualified Data.ByteString.Lazy as LBS 13 | import qualified HaskellWorks.CabalCache.Types as Z 14 | 15 | {- HLINT ignore "Redundant do" -} 16 | {- HLINT ignore "Reduce duplication" -} 17 | {- HLINT ignore "Redundant bracket" -} 18 | 19 | spec :: Spec 20 | spec = describe "HaskellWorks.CabalCache.QuerySpec" do 21 | it "stub" $ requireTest do 22 | case A.eitherDecode exampleJson of 23 | Right planJson -> do 24 | planJson === Z.PlanJson 25 | { Z.compilerId = "ghc-8.6.4" 26 | , Z.installPlan = 27 | [ Z.Package 28 | { Z.packageType = "pre-existing" 29 | , Z.id = "Cabal-2.4.0.1" 30 | , Z.name = "Cabal" 31 | , Z.version = "2.4.0.1" 32 | , Z.style = Nothing 33 | , Z.componentName = Nothing 34 | , Z.components = Nothing 35 | , Z.depends = 36 | [ "array-0.5.3.0" 37 | , "base-4.12.0.0" 38 | ] 39 | , Z.exeDepends = [] 40 | } 41 | ] 42 | } 43 | Left msg -> fail msg 44 | 45 | exampleJson :: LBS.ByteString 46 | exampleJson = [r| 47 | { 48 | "cabal-version": "2.4.1.0", 49 | "cabal-lib-version": "2.4.1.0", 50 | "compiler-id": "ghc-8.6.4", 51 | "os": "osx", 52 | "arch": "x86_64", 53 | "install-plan": [ 54 | { 55 | "type": "pre-existing", 56 | "id": "Cabal-2.4.0.1", 57 | "pkg-name": "Cabal", 58 | "pkg-version": "2.4.0.1", 59 | "depends": [ 60 | "array-0.5.3.0", 61 | "base-4.12.0.0" 62 | ] 63 | } 64 | ] 65 | } 66 | |] 67 | --------------------------------------------------------------------------------
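The decode above also exercises the defaulting in the FromJSON instances from Types.hs: the absent "exe-depends" array comes back as [] and the optional fields as Nothing. A ghci-style sketch with a hypothetical minimal package object (assumes OverloadedStrings):

-- >>> A.eitherDecode "{\"type\":\"configured\",\"id\":\"x-1.0\",\"pkg-name\":\"x\",\"pkg-version\":\"1.0\"}" :: Either String Z.Package
-- Right (Package {packageType = "configured", id = "x-1.0", name = "x", version = "1.0", style = Nothing, componentName = Nothing, components = Nothing, depends = [], exeDepends = []})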
/test/Spec.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -F -pgmF hspec-discover #-} 2 | -------------------------------------------------------------------------------- /test/Test/Base.hs: -------------------------------------------------------------------------------- 1 | module Test.Base 2 | ( execCabalCache 3 | , execCabalCache_ 4 | , execCabalCache' 5 | , integration 6 | ) where 7 | 8 | import Control.Monad.Catch (MonadCatch) 9 | import HaskellWorks.Prelude 10 | import Hedgehog (MonadTest) 11 | import Hedgehog.Extras.Test.Process (ExecConfig) 12 | 13 | import qualified GHC.Stack as GHC 14 | import qualified Hedgehog as H 15 | import qualified Hedgehog.Extras.Test.Base as H 16 | import qualified Hedgehog.Extras.Test.Process as H 17 | 18 | integration :: HasCallStack => H.Integration () -> H.Property 19 | integration = H.withTests 1 . H.propertyOnce 20 | 21 | -- | Run cabal-cache, returning the stdout 22 | execCabalCache 23 | :: (MonadTest m, MonadCatch m, MonadIO m, HasCallStack) 24 | => [String] 25 | -> m String 26 | execCabalCache = GHC.withFrozenCallStack $ H.execFlex "cabal-cache" "CABAL_CACHE" 27 | 28 | -- | Run cabal-cache, discarding return value 29 | execCabalCache_ 30 | :: (MonadTest m, MonadCatch m, MonadIO m, HasCallStack) 31 | => [String] 32 | -> m () 33 | execCabalCache_ = void . execCabalCache 34 | 35 | -- | Run cabal-cache with the given ExecConfig, returning the stdout 36 | execCabalCache' 37 | :: (MonadTest m, MonadCatch m, MonadIO m, HasCallStack) 38 | => ExecConfig 39 | -> [String] 40 | -> m String 41 | execCabalCache' execConfig = GHC.withFrozenCallStack $ H.execFlex' execConfig "cabal-cache" "CABAL_CACHE" 42 | -------------------------------------------------------------------------------- /test/test-missing-all.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cabal_cache_exe="$1" 4 | archive="$2" 5 | 6 | for x in "$archive"/v2/6d91da2ce3/ghc-8.6.4/*; do 7 | echo "====== $(basename "$x") ======" 8 | rm -rf ~/.cabal/store/ghc-8.6.4 9 | mkdir -p "$archive"-2/v2/6d91da2ce3/ghc-8.6.4 10 | cp "$archive"/v2/6d91da2ce3/ghc-8.6.4/* "$archive"-2/v2/6d91da2ce3/ghc-8.6.4/ 11 | rm "$archive"-2/v2/6d91da2ce3/ghc-8.6.4/"$(basename "$x")" 12 | "$cabal_cache_exe" sync-from-archive --threads 16 --archive-uri "$archive"-2 --region Sydney > /dev/null 2> /dev/null 13 | ./project.sh build 14 | done 15 | -------------------------------------------------------------------------------- /test/test-missing.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cabal_cache_exe="$1" 4 | archive="$2" 5 | package_id="$3" 6 | 7 | for x in "$archive"/v2/6d91da2ce3/ghc-8.6.4/"$package_id"; do 8 | echo "====== $(basename "$x") ======" 9 | rm -rf ~/.cabal/store/ghc-8.6.4 10 | mkdir -p "$archive"-2/v2/6d91da2ce3/ghc-8.6.4 11 | cp "$archive"/v2/6d91da2ce3/ghc-8.6.4/* "$archive"-2/v2/6d91da2ce3/ghc-8.6.4/ 12 | rm "$archive"-2/v2/6d91da2ce3/ghc-8.6.4/"$(basename "$x")" 13 | "$cabal_cache_exe" sync-from-archive --threads 16 --archive-uri "$archive"-2 --region Sydney 14 | ./project.sh build 15 | done 16 | --------------------------------------------------------------------------------
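Both scripts expect to run from a project root that provides ./project.sh, against a local archive laid out as <archive>/v2/6d91da2ce3/ghc-8.6.4; the third argument of test-missing.sh names the archived package file to withhold. A hypothetical invocation (paths and file name illustrative):

./test/test-missing.sh /path/to/cabal-cache /tmp/archive text-1.2.4.0.tar.gz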