├── .git-blame-ignore-revs ├── .github ├── CODEOWNERS ├── actions.yml ├── pr-labeler.yml ├── release-drafter.yml └── workflows │ ├── ci.yml │ ├── docs.yml │ ├── release-drafter.yml │ └── release.yml ├── .gitignore ├── .ruby-version ├── .scalafmt.conf ├── AUTHORS.md ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE.md ├── NOTICE.md ├── README.md ├── build.sbt ├── docs ├── AUTHORS.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE.md ├── NOTICE.md └── README.md ├── fetch-debug └── src │ └── main │ └── scala │ ├── debug.scala │ └── document.scala ├── fetch-examples └── src │ └── test │ ├── resources │ └── simplelogger.properties │ └── scala │ ├── DoobieExample.scala │ ├── GithubExample.scala │ ├── GraphQLExample.scala │ ├── Http4sExample.scala │ └── JedisExample.scala ├── fetch └── src │ ├── main │ └── scala │ │ ├── cache.scala │ │ ├── datasource.scala │ │ ├── execution.scala │ │ ├── fetch.scala │ │ ├── log.scala │ │ └── syntax.scala │ └── test │ └── scala │ ├── FetchAsyncQueryTests.scala │ ├── FetchBatchingTests.scala │ ├── FetchReportingTests.scala │ ├── FetchSpec.scala │ ├── FetchSyntaxTests.scala │ ├── FetchTests.scala │ └── TestHelper.scala ├── microsite ├── docs │ ├── docs.md │ └── index.md └── src │ └── main │ └── resources │ └── microsite │ ├── _data │ ├── commons.yml │ ├── features.yml │ └── menu.yml │ ├── _includes │ ├── _fetch-footer.html │ ├── _fetch-head.html │ ├── _fetch-header.html │ ├── _fetch-main.html │ └── _fetch-navigation.html │ ├── _layouts │ └── fetch-home.html │ ├── css │ └── custom.scss │ ├── custom-config.yml │ ├── img │ ├── favicon.ico │ ├── favicon.png │ ├── icon-feature-first.svg │ ├── icon-feature-second.svg │ ├── icon-feature-third.svg │ ├── nav-icon-close.svg │ ├── nav-icon-open.svg │ ├── navbar_brand.png │ ├── navbar_brand.svg │ ├── navbar_brand2x.png │ ├── pattern-background.png │ ├── pattern-background.svg │ ├── pattern-background@2x.png │ ├── poster.png │ ├── sidebar_brand.png │ ├── sidebar_brand.svg │ 
├── sidebar_brand2x.png │ └── twitter-card.png │ ├── js │ └── automenu.js │ └── static │ ├── _sass │ ├── _breakpoint.scss │ ├── _components.scss │ ├── _fetch_docs.scss │ ├── _footer.scss │ ├── _header.scss │ ├── _main.scss │ ├── _navigation.scss │ └── _variables.scss │ ├── fetch_animation.json │ └── home.js └── project ├── ProjectPlugin.scala ├── build.properties └── plugins.sbt /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Scala Steward: Reformat with scalafmt 3.8.4 2 | 31437ca5e9dbc9d31fd10190094a2d341d625c6d 3 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | @47degrees/subterrel 2 | -------------------------------------------------------------------------------- /.github/actions.yml: -------------------------------------------------------------------------------- 1 | pre: 2 | docs: 'gem install jekyll -v 4' -------------------------------------------------------------------------------- /.github/pr-labeler.yml: -------------------------------------------------------------------------------- 1 | # Don't edit this file! 2 | # It is automatically updated after every release of https://github.com/47degrees/.github 3 | # If you want to suggest a change, please open a PR or issue in that repository 4 | 5 | enhancement: ['enhancement/*', 'feature/*'] 6 | documentation: ['docs/*', 'doc/*'] 7 | breaking-change: ['breaking/*', 'break/*'] 8 | bug: ['bug/*', 'fix/*'] 9 | tests: ['test/*', 'tests/*'] 10 | dependency-update: ['dep/*', 'dependency/*', 'dependency-update/*'] 11 | scala-steward: ['update/*'] 12 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | # Don't edit this file! 
2 | # It is automatically updated after every release of https://github.com/47degrees/.github 3 | # If you want to suggest a change, please open a PR or issue in that repository 4 | 5 | name-template: 'v$NEXT_PATCH_VERSION' 6 | tag-template: 'v$NEXT_PATCH_VERSION' 7 | exclude-labels: 8 | - 'auto-update' 9 | - 'auto-documentation' 10 | - 'auto-changelog' 11 | categories: 12 | - title: '⚠️ Breaking changes' 13 | label: 'breaking-change' 14 | - title: '🚀 Features' 15 | label: 'enhancement' 16 | - title: '📘 Documentation' 17 | label: 'documentation' 18 | - title: '🐛 Bug Fixes' 19 | label: 'bug' 20 | - title: '📈 Dependency updates' 21 | labels: 22 | - 'dependency-update' 23 | - 'scala-steward' 24 | template: | 25 | ## What's changed 26 | 27 | $CHANGES 28 | 29 | ## Contributors to this release 30 | 31 | $CONTRIBUTORS 32 | 33 | autolabeler: 34 | - label: "enhancement" 35 | branch: 36 | - '/enhancement\/.+/' 37 | - '/feature\/.+/' 38 | - label: "documentation" 39 | files: 40 | - "*.md" 41 | branch: 42 | - '/docs\/.+/' 43 | - '/doc\/.+/' 44 | - label: "breaking-change" 45 | branch: 46 | - '/breaking\/.+/' 47 | - '/break\/.+/' 48 | - label: "bug" 49 | branch: 50 | - '/bug\/.+/' 51 | - '/fix\/.+/' 52 | - label: "tests" 53 | branch: 54 | - '/test\/.+/' 55 | - '/tests\/.+/' 56 | - label: "dependency-update" 57 | branch: 58 | - '/dep\/.+/' 59 | - '/dependency\/.+/' 60 | - '/dependency-update\/.+/' 61 | - label: "scala-steward" 62 | branch: 63 | - '/update\/.+/' 64 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # Don't edit this file! 
2 | # It is automatically updated after every release of https://github.com/47degrees/.github 3 | # If you want to suggest a change, please open a PR or issue in that repository 4 | 5 | name: Formatters & Tests 6 | 7 | on: 8 | push: 9 | branches: [main] 10 | pull_request: 11 | types: 12 | - opened 13 | - reopened 14 | - synchronize 15 | - ready_for_review 16 | 17 | concurrency: 18 | group: ${{ github.workflow }}-${{ github.ref }} 19 | cancel-in-progress: true 20 | 21 | env: 22 | USE_RUBY: ${{ vars.USE_RUBY || 'false' }} 23 | USE_YQ: ${{ vars.USE_YQ || 'false' }} 24 | USE_CACHE: ${{ vars.USE_CACHE || 'false' }} 25 | CACHE_FOLDER: ${{ vars.CACHE_FOLDER || '.cache' }} 26 | 27 | jobs: 28 | test: 29 | if: ${{!contains(github.event.head_commit.message, 'skip ci') && !github.event.pull_request.draft}} 30 | runs-on: ubuntu-latest 31 | steps: 32 | - name: Checkout project (pull-request) 33 | if: github.event_name == 'pull_request' 34 | uses: actions/checkout@v2.3.2 35 | with: 36 | repository: ${{ github.event.pull_request.head.repo.full_name }} 37 | ref: ${{ github.event.pull_request.head.ref }} 38 | - name: Checkout project (main) 39 | if: github.event_name == 'push' 40 | uses: actions/checkout@v2 41 | - name: Setup Scala 42 | uses: olafurpg/setup-scala@v11 43 | with: 44 | java-version: adopt@1.11 45 | - name: Setup Ruby 46 | if: ${{ env.USE_RUBY == 'true' }} 47 | uses: ruby/setup-ruby@v1 48 | with: 49 | ruby-version: .ruby-version 50 | - name: Setup yq 51 | if: ${{ env.USE_YQ == 'true' }} 52 | run: sudo snap install yq 53 | - name: Run pre-conditions 54 | run: test -f .github/actions.yml && eval "$(yq e '.pre.ci // "true"' .github/actions.yml)" || true 55 | - name: Run scalafmt on Scala Steward PRs 56 | if: github.event.pull_request.user.login == '47erbot' && contains(github.event.pull_request.body, 'Scala Steward') 57 | run: sbt "scalafixEnable; fix" || sbt "scalafmtAll; scalafmtSbt" || true 58 | - name: Push changes 59 | uses: stefanzweifel/git-auto-commit-action@v4.5.1 
60 | with: 61 | commit_message: Run formatter/linter 62 | - name: Restore cache 63 | id: ci-cache-restore 64 | if: ${{ env.USE_CACHE == 'true' }} 65 | uses: actions/cache/restore@v3 66 | with: 67 | path: | 68 | ${{ env.CACHE_FOLDER }} 69 | key: ci-cache-key 70 | - name: Run checks 71 | run: sbt ci-test 72 | env: 73 | GITHUB_TOKEN: ${{ secrets.ADMIN_GITHUB_TOKEN }} 74 | - name: Save cache 75 | id: ci-cache-save 76 | if: ${{ env.USE_CACHE == 'true' }} 77 | uses: actions/cache/save@v3 78 | with: 79 | path: | 80 | ${{ env.CACHE_FOLDER }} 81 | key: ${{ steps.ci-cache-restore.outputs.cache-primary-key }} 82 | - name: Run post-conditions 83 | run: test -f .github/actions.yml && eval "$(yq e '.post.ci // "true"' .github/actions.yml)" || true 84 | - name: Automerge Scala Steward PRs 85 | if: success() && github.event_name == 'pull_request' && contains(github.event.pull_request.body, 'Scala Steward') 86 | uses: ridedott/merge-me-action@v1.1.36 87 | with: 88 | GITHUB_LOGIN: 47erbot 89 | GITHUB_TOKEN: ${{ secrets.ADMIN_GITHUB_TOKEN }} 90 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | # Don't edit this file! 
2 | # It is automatically updated after every release of https://github.com/47degrees/.github 3 | # If you want to suggest a change, please open a PR or issue in that repository 4 | 5 | name: Update documentation 6 | 7 | on: 8 | release: 9 | types: [published] 10 | repository_dispatch: 11 | types: [docs] 12 | 13 | jobs: 14 | documentation: 15 | if: "!contains(github.event.head_commit.message, 'skip ci')" 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout project 19 | uses: actions/checkout@v2 20 | with: 21 | token: ${{ secrets.ADMIN_GITHUB_TOKEN }} 22 | ref: main 23 | - name: Fetch tags 24 | run: git fetch --tags 25 | - name: Setup Scala 26 | uses: olafurpg/setup-scala@v11 27 | with: 28 | java-version: adopt@1.11 29 | - name: Setup Ruby 30 | uses: ruby/setup-ruby@v1 31 | with: 32 | ruby-version: .ruby-version 33 | - name: Setup github-changelog-generator 34 | run: gem install github_changelog_generator -v 1.15.0 35 | - name: Setup yq 36 | run: sudo snap install yq 37 | - name: Run pre-conditions 38 | run: test -f .github/actions.yml && eval "$(yq e '.pre.docs // "true"' .github/actions.yml)" || true 39 | - name: Generate documentation 40 | run: sbt ci-docs 41 | env: 42 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 43 | DOWNLOAD_INFO_FROM_GITHUB: true 44 | - name: Run post-conditions 45 | run: test -f .github/actions.yml && eval "$(yq e '.post.docs // "true"' .github/actions.yml)" || true 46 | - name: Push changes 47 | uses: stefanzweifel/git-auto-commit-action@v4.1.3 48 | with: 49 | commit_message: 'Update documentation, and other files [skip ci]' 50 | -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Drafts/updates the next repository release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | update_release_draft: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: 
release-drafter/release-drafter@v5 13 | env: 14 | GITHUB_TOKEN: ${{ secrets.ADMIN_GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # Don't edit this file! 2 | # It is automatically updated after every release of https://github.com/47degrees/.github 3 | # If you want to suggest a change, please open a PR or issue in that repository 4 | 5 | name: Release 6 | 7 | on: 8 | release: 9 | types: [published] 10 | push: 11 | branches: main 12 | 13 | jobs: 14 | release: 15 | if: "!contains(github.event.head_commit.message, 'skip ci')" 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout project 19 | uses: actions/checkout@v2 20 | with: 21 | fetch-depth: 0 22 | - name: Fetch tags 23 | run: git fetch --tags 24 | - name: Setup Scala 25 | uses: olafurpg/setup-scala@v11 26 | with: 27 | java-version: adopt@1.11 28 | - name: Setup Ruby 29 | uses: ruby/setup-ruby@v1 30 | with: 31 | ruby-version: .ruby-version 32 | - name: Setup GPG 33 | uses: olafurpg/setup-gpg@v3 34 | - name: Setup yq 35 | run: sudo snap install yq 36 | - name: Run pre-conditions 37 | run: test -f .github/actions.yml && eval "$(yq e '.pre.release // "true"' .github/actions.yml)" || true 38 | - name: Release new version 39 | run: sbt ci-publish 40 | env: 41 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 42 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 43 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 44 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 45 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 46 | - name: Run post-conditions 47 | run: test -f .github/actions.yml && eval "$(yq e '.post.release // "true"' .github/actions.yml)" || true 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Don't edit this 
file! 2 | # It is automatically updated after every release of https://github.com/47degrees/.github 3 | # If you want to suggest a change, please open a PR or issue in that repository 4 | 5 | ### Intellij ### 6 | 7 | .idea 8 | out/ 9 | 10 | ### Java ### 11 | 12 | *.class 13 | *.log 14 | 15 | ### macOS ### 16 | 17 | .DS_Store 18 | 19 | ### SBT ### 20 | 21 | dist/* 22 | target/ 23 | lib_managed/ 24 | src_managed/ 25 | project/boot/ 26 | project/plugins/project/ 27 | .history 28 | .cache 29 | .lib/ 30 | .bsp 31 | 32 | ### Scala ### 33 | 34 | *.metals 35 | .bloop/ 36 | .metals/ 37 | metals.sbt 38 | 39 | ### Scala-CLI ### 40 | 41 | .scala-build/ 42 | 43 | ### Mill ### 44 | 45 | # Technically a duplicate of the IDEA section. 46 | out/ 47 | 48 | ### Vim ### 49 | 50 | # Swap 51 | [._]*.s[a-v][a-z] 52 | [._]*.sw[a-p] 53 | [._]s[a-rt-v][a-z] 54 | [._]ss[a-gi-z] 55 | [._]sw[a-p] 56 | 57 | # Session 58 | Session.vim 59 | Sessionx.vim 60 | 61 | # Temporary 62 | .netrwhist 63 | 64 | # Project local build artefacts 65 | .output 66 | 67 | # Auto-generated tag files 68 | tags 69 | 70 | # Persistent undo 71 | [._]*.un~ 72 | 73 | # Coc configuration directory 74 | .vim 75 | 76 | ### VisualStudioCode ### 77 | 78 | .vscode/ 79 | .vscode/* 80 | !.vscode/settings.json 81 | !.vscode/tasks.json 82 | !.vscode/launch.json 83 | !.vscode/extensions.json 84 | 85 | ### Direnv ### 86 | 87 | .direnv 88 | .envrc 89 | 90 | ### JavaScript / NPM / Yarn / Bundlers ### 91 | 92 | node_modules 93 | .parcel-cache 94 | dist 95 | -------------------------------------------------------------------------------- /.ruby-version: -------------------------------------------------------------------------------- 1 | 2.7.2 -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = 3.9.4 2 | 3 | style = defaultWithAlign 4 | maxColumn = 100 5 | 6 | runner { 7 | dialect = scala213 8 | }
9 | 10 | continuationIndent.callSite = 2 11 | 12 | newlines { 13 | sometimesBeforeColonInMethodReturnType = false 14 | } 15 | 16 | align { 17 | arrowEnumeratorGenerator = false 18 | ifWhileOpenParen = false 19 | openParenCallSite = false 20 | openParenDefnSite = false 21 | } 22 | 23 | docstrings.style = Asterisk 24 | 25 | rewrite { 26 | rules = [SortImports, RedundantBraces] 27 | redundantBraces.maxLines = 1 28 | } 29 | 30 | rewriteTokens { 31 | "⇒": "=>" 32 | "←": "<-" 33 | } 34 | -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | # Authors 6 | 7 | ## Maintainers 8 | 9 | The maintainers of the project are: 10 | 11 | - [![cb372](https://avatars.githubusercontent.com/u/106760?v=4&s=20) **Chris Birchall (cb372)**](https://github.com/cb372) 12 | - [![franciscodr](https://avatars.githubusercontent.com/u/1200151?v=4&s=20) **Francisco Diaz (franciscodr)**](https://github.com/franciscodr) 13 | - [![juanpedromoreno](https://avatars.githubusercontent.com/u/4879373?v=4&s=20) **Juan Pedro Moreno (juanpedromoreno)**](https://github.com/juanpedromoreno) 14 | 15 | ## Contributors 16 | 17 | These are the people that have contributed to the _fetch_ project: 18 | 19 | - [![47erbot](https://avatars.githubusercontent.com/u/24799081?v=4&s=20) **47erbot**](https://github.com/47erbot) 20 | - [![purrgrammer](https://avatars.githubusercontent.com/u/42009830?v=4&s=20) **purrgrammer**](https://github.com/purrgrammer) 21 | - [![47degdev](https://avatars.githubusercontent.com/u/5580770?v=4&s=20) **47degdev**](https://github.com/47degdev) 22 | - 
[![juanpedromoreno](https://avatars.githubusercontent.com/u/4879373?v=4&s=20) **juanpedromoreno**](https://github.com/juanpedromoreno) 23 | - [![sloshy](https://avatars.githubusercontent.com/u/427237?v=4&s=20) **sloshy**](https://github.com/sloshy) 24 | - [![alejandrohdezma](https://avatars.githubusercontent.com/u/9027541?v=4&s=20) **alejandrohdezma**](https://github.com/alejandrohdezma) 25 | - [![peterneyens](https://avatars.githubusercontent.com/u/6407606?v=4&s=20) **peterneyens**](https://github.com/peterneyens) 26 | - [![AntonioMateoGomez](https://avatars.githubusercontent.com/u/25897490?v=4&s=20) **AntonioMateoGomez**](https://github.com/AntonioMateoGomez) 27 | - [![calvellido](https://avatars.githubusercontent.com/u/7753447?v=4&s=20) **calvellido**](https://github.com/calvellido) 28 | - [![raulraja](https://avatars.githubusercontent.com/u/456796?v=4&s=20) **raulraja**](https://github.com/raulraja) 29 | - [![fedefernandez](https://avatars.githubusercontent.com/u/720923?v=4&s=20) **fedefernandez**](https://github.com/fedefernandez) 30 | - [![franciscodr](https://avatars.githubusercontent.com/u/1200151?v=4&s=20) **franciscodr**](https://github.com/franciscodr) 31 | - [![Daenyth](https://avatars.githubusercontent.com/u/14644?v=4&s=20) **Daenyth**](https://github.com/Daenyth) 32 | - [![pepegar](https://avatars.githubusercontent.com/u/694179?v=4&s=20) **pepegar**](https://github.com/pepegar) 33 | - [![BenFradet](https://avatars.githubusercontent.com/u/1737211?v=4&s=20) **BenFradet**](https://github.com/BenFradet) 34 | - [![jordiolivares](https://avatars.githubusercontent.com/u/1163790?v=4&s=20) **jordiolivares**](https://github.com/jordiolivares) 35 | - [![MaureenElsberry](https://avatars.githubusercontent.com/u/17556002?v=4&s=20) **MaureenElsberry**](https://github.com/MaureenElsberry) 36 | - [![israelpzglez](https://avatars.githubusercontent.com/u/646886?v=4&s=20) **israelpzglez**](https://github.com/israelpzglez) 37 | - 
[![diesalbla](https://avatars.githubusercontent.com/u/1764610?v=4&s=20) **diesalbla**](https://github.com/diesalbla) 38 | - [![kubukoz](https://avatars.githubusercontent.com/u/894884?v=4&s=20) **kubukoz**](https://github.com/kubukoz) 39 | - [![jkmcclellan](https://avatars.githubusercontent.com/u/52432856?v=4&s=20) **jkmcclellan**](https://github.com/jkmcclellan) 40 | - [![lambdista](https://avatars.githubusercontent.com/u/4966276?v=4&s=20) **lambdista**](https://github.com/lambdista) 41 | - [![paulpdaniels](https://avatars.githubusercontent.com/u/2528918?v=4&s=20) **paulpdaniels**](https://github.com/paulpdaniels) 42 | - [![rafaparadela](https://avatars.githubusercontent.com/u/315070?v=4&s=20) **rafaparadela**](https://github.com/rafaparadela) 43 | - [![adelbertc](https://avatars.githubusercontent.com/u/1332980?v=4&s=20) **adelbertc**](https://github.com/adelbertc) 44 | - [![adpi2](https://avatars.githubusercontent.com/u/13123162?v=4&s=20) **adpi2**](https://github.com/adpi2) 45 | - [![a-khakimov](https://avatars.githubusercontent.com/u/33376759?v=4&s=20) **a-khakimov**](https://github.com/a-khakimov) 46 | - [![benderpremier](https://avatars.githubusercontent.com/u/290994?v=4&s=20) **benderpremier**](https://github.com/benderpremier) 47 | - [![guersam](https://avatars.githubusercontent.com/u/969120?v=4&s=20) **guersam**](https://github.com/guersam) 48 | - [![justinhj](https://avatars.githubusercontent.com/u/753059?v=4&s=20) **justinhj**](https://github.com/justinhj) 49 | - [![matsluni](https://avatars.githubusercontent.com/u/2551177?v=4&s=20) **matsluni**](https://github.com/matsluni) 50 | - [![rossabaker](https://avatars.githubusercontent.com/u/142698?v=4&s=20) **rossabaker**](https://github.com/rossabaker) 51 | - [![suhasgaddam](https://avatars.githubusercontent.com/u/7282584?v=4&s=20) **suhasgaddam**](https://github.com/suhasgaddam) 52 | - [![gitter-badger](https://avatars.githubusercontent.com/u/8518239?v=4&s=20) 
**gitter-badger**](https://github.com/gitter-badger) 53 | - [![gatorcse](https://avatars.githubusercontent.com/u/358979?v=4&s=20) **gatorcse**](https://github.com/gatorcse) 54 | - [![williamho](https://avatars.githubusercontent.com/u/1883086?v=4&s=20) **williamho**](https://github.com/williamho) 55 | - [![cb372](https://avatars.githubusercontent.com/u/106760?v=4&s=20) **cb372**](https://github.com/cb372) 56 | - [![davesmith00047](https://avatars.githubusercontent.com/u/119872582?v=4&s=20) **davesmith00047**](https://github.com/davesmith00047) -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | # Code of Conduct 6 | 7 | We are committed to providing a friendly, safe and welcoming 8 | environment for all, regardless of level of experience, gender, gender 9 | identity and expression, sexual orientation, disability, personal 10 | appearance, body size, race, ethnicity, age, religion, nationality, or 11 | other such characteristics. 12 | 13 | Everyone is expected to follow the 14 | [Scala Code of Conduct](https://www.scala-lang.org/conduct/) when 15 | discussing the project on the available communication channels. If you 16 | are being harassed, please contact us immediately so that we can 17 | support you. 18 | 19 | ## Moderation 20 | 21 | For any questions, concerns, or moderation requests please contact a 22 | [member of the project](AUTHORS.md#maintainers). -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 
2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | # Contributing 6 | 7 | Discussion around _fetch_ happens in the [GitHub issues](https://github.com/47degrees/fetch/issues) and [pull requests](https://github.com/47degrees/fetch/pulls). 8 | 9 | Feel free to open an issue if you notice a bug, have an idea for a feature, or have a question about 10 | the code. Pull requests are also welcome. 11 | 12 | People are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md) when discussing _fetch_ on the Github page or other venues. 13 | 14 | If you are being harassed, please contact one of [us](AUTHORS.md#maintainers) immediately so that we can support you. In case you cannot get in touch with us please write an email to [47 Degrees Open Source](mailto:hello@47deg.com). 15 | 16 | ## How can I help? 17 | 18 | _fetch_ follows a standard [fork and pull](https://help.github.com/articles/using-pull-requests/) model for contributions via GitHub pull requests. 19 | 20 | The process is simple: 21 | 22 | 1. Find something you want to work on 23 | 2. Let us know you are working on it via GitHub issues/pull requests 24 | 3. Implement your contribution 25 | 4. Write tests 26 | 5. Update the documentation 27 | 6. Submit pull request 28 | 29 | You will be automatically included in the [AUTHORS.md](AUTHORS.md#contributors) file as contributor in the next release. 30 | 31 | If you encounter any confusion or frustration during the contribution process, please create a GitHub issue and we'll do our best to improve the process. 
-------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright (C) 2016-2023 47 Degrees Open Source 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /NOTICE.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | fetch 6 | 7 | Copyright (c) 2016-2023 47 Degrees Open Source. All rights reserved. 8 | 9 | Licensed under Apache-2.0. See [LICENSE](LICENSE.md) for terms. 
-------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | ThisBuild / scalaVersion := scala213 2 | ThisBuild / organization := "com.47deg" 3 | ThisBuild / githubOrganization := "xebia-functional" 4 | 5 | addCommandAlias("ci-test", "scalafmtCheckAll; scalafmtSbtCheck; mdoc; ++test") 6 | addCommandAlias("ci-docs", "github; mdoc; headerCreateAll; publishMicrosite") 7 | addCommandAlias("ci-publish", "github; ci-release") 8 | 9 | lazy val scala212 = "2.12.20" 10 | lazy val scala213 = "2.13.16" 11 | lazy val scala3Version = "3.7.1" 12 | lazy val scala2Versions = Seq(scala212, scala213) 13 | lazy val allScalaVersions = scala2Versions :+ scala3Version 14 | 15 | publish / skip := true 16 | 17 | lazy val fetch = crossProject(JSPlatform, JVMPlatform) 18 | .crossType(CrossType.Pure) 19 | .settings(commonCrossDependencies) 20 | .settings(crossScalaVersions := allScalaVersions) 21 | 22 | lazy val fetchJVM = fetch.jvm 23 | lazy val fetchJS = fetch.js 24 | .settings(crossScalaVersions := scala2Versions) 25 | 26 | lazy val `fetch-debug` = crossProject(JSPlatform, JVMPlatform) 27 | .crossType(CrossType.Pure) 28 | .dependsOn(fetch) 29 | .settings(commonCrossDependencies) 30 | .settings(crossScalaVersions := allScalaVersions) 31 | 32 | lazy val debugJVM = `fetch-debug`.jvm 33 | lazy val debugJS = `fetch-debug`.js 34 | .settings(crossScalaVersions := scala2Versions) 35 | 36 | lazy val `fetch-examples` = project 37 | .dependsOn(fetchJVM, debugJVM) 38 | .settings(publish / skip := true) 39 | .settings(examplesSettings: _*) 40 | .settings(crossScalaVersions := scala2Versions) 41 | 42 | lazy val microsite = project 43 | .dependsOn(fetchJVM, debugJVM) 44 | .settings(docsSettings: _*) 45 | .settings(publish / skip := true) 46 | .enablePlugins(MicrositesPlugin, MdocPlugin) 47 | .settings(crossScalaVersions := scala2Versions) 48 | 49 | lazy val documentation = project 50 | 
.dependsOn(fetchJVM) 51 | .settings(publish / skip := true) 52 | .settings(mdocOut := file(".")) 53 | .enablePlugins(MdocPlugin) 54 | .settings(crossScalaVersions := scala2Versions) 55 | -------------------------------------------------------------------------------- /docs/AUTHORS.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | # Authors 6 | 7 | ## Maintainers 8 | 9 | The maintainers of the project are: 10 | 11 | @COLLABORATORS@ 12 | 13 | ## Contributors 14 | 15 | These are the people that have contributed to the _@NAME@_ project: 16 | 17 | @CONTRIBUTORS@ -------------------------------------------------------------------------------- /docs/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | # Code of Conduct 6 | 7 | We are committed to providing a friendly, safe and welcoming 8 | environment for all, regardless of level of experience, gender, gender 9 | identity and expression, sexual orientation, disability, personal 10 | appearance, body size, race, ethnicity, age, religion, nationality, or 11 | other such characteristics. 12 | 13 | Everyone is expected to follow the 14 | [Scala Code of Conduct](https://www.scala-lang.org/conduct/) when 15 | discussing the project on the available communication channels. If you 16 | are being harassed, please contact us immediately so that we can 17 | support you. 
18 | 19 | ## Moderation 20 | 21 | For any questions, concerns, or moderation requests please contact a 22 | [member of the project](AUTHORS.md#maintainers). -------------------------------------------------------------------------------- /docs/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | # Contributing 6 | 7 | Discussion around _@NAME@_ happens in the [GitHub issues](https://github.com/@REPO@/issues) and [pull requests](https://github.com/@REPO@/pulls). 8 | 9 | Feel free to open an issue if you notice a bug, have an idea for a feature, or have a question about 10 | the code. Pull requests are also welcome. 11 | 12 | People are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md) when discussing _@NAME@_ on the Github page or other venues. 13 | 14 | If you are being harassed, please contact one of [us](AUTHORS.md#maintainers) immediately so that we can support you. In case you cannot get in touch with us please write an email to [@ORG_NAME@](mailto:@ORG_EMAIL@). 15 | 16 | ## How can I help? 17 | 18 | _@NAME@_ follows a standard [fork and pull](https://help.github.com/articles/using-pull-requests/) model for contributions via GitHub pull requests. 19 | 20 | The process is simple: 21 | 22 | 1. Find something you want to work on 23 | 2. Let us know you are working on it via GitHub issues/pull requests 24 | 3. Implement your contribution 25 | 4. Write tests 26 | 5. Update the documentation 27 | 6. Submit pull request 28 | 29 | You will be automatically included in the [AUTHORS.md](AUTHORS.md#contributors) file as contributor in the next release. 
30 | 31 | If you encounter any confusion or frustration during the contribution process, please create a GitHub issue and we'll do our best to improve the process. -------------------------------------------------------------------------------- /docs/LICENSE.md: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright (C) @YEAR_RANGE@ @COPYRIGHT_OWNER@ 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /docs/NOTICE.md: -------------------------------------------------------------------------------- 1 | [comment]: <> (Don't edit this file!) 2 | [comment]: <> (It is automatically updated after every release of https://github.com/47degrees/.github) 3 | [comment]: <> (If you want to suggest a change, please open a PR or issue in that repository) 4 | 5 | @NAME@ 6 | 7 | Copyright (c) @YEAR_RANGE@ @ORG_NAME@. All rights reserved. 8 | 9 | Licensed under @LICENSE@. See [LICENSE](LICENSE.md) for terms. 
-------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Fetch 2 | 3 | [![Join the chat at https://gitter.im/47deg/fetch](https://badges.gitter.im/47deg/fetch.svg)](https://gitter.im/47deg/fetch?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Maven Central](https://img.shields.io/badge/maven%20central-1.2.1-green.svg)](https://oss.sonatype.org/#nexus-search;gav~com.47deg~fetch*) [![License](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://raw.githubusercontent.com/xebia-functional/fetch/master/LICENSE.md) [![Latest version](https://img.shields.io/badge/fetch-1.2.1-green.svg)](https://index.scala-lang.org/47deg/fetch) [![Scala.js](http://scala-js.org/assets/badges/scalajs-0.6.15.svg)](http://scala-js.org) [![GitHub Issues](https://img.shields.io/github/issues/47deg/fetch.svg)](https://github.com/xebia-functional/fetch/issues) 4 | 5 | A library for Simple & Efficient data access in Scala and Scala.js 6 | 7 | - [Documentation](https://xebia-functional.github.io/fetch/docs) 8 | 9 | ```scala mdoc:toc 10 | ``` 11 | 12 | ## Installation 13 | 14 | Add the following dependency to your project's build file. 15 | 16 | For Scala 2.12.x through 3.x: 17 | 18 | ```scala 19 | "com.47deg" %% "fetch" % "@VERSION@" 20 | ``` 21 | 22 | Or, if using Scala.js (1.8.x): 23 | 24 | ```scala 25 | "com.47deg" %%% "fetch" % "@VERSION@" 26 | ``` 27 | 28 | ```scala mdoc:invisible 29 | val out = Console.out 30 | 31 | def println(msg: String): Unit = { 32 | Console.withOut(out) { 33 | Console.println(msg) 34 | } 35 | } 36 | ``` 37 | 38 | ## Remote data 39 | 40 | Fetch is a library for making access to data both simple and efficient. Fetch is especially useful when querying data that 41 | has a latency cost, such as databases or web services. 
42 | 43 | ## Define your data sources 44 | 45 | To tell Fetch how to get the data you want, you must implement the `DataSource` typeclass. Data sources have `fetch` and `batch` methods that define how to fetch such a piece of data. 46 | 47 | Data Sources take two type parameters: 48 | 49 |
    50 | 1. `Identity` is a type that has enough information to fetch the data
51 | 2. `Result` is the type of data we want to fetch
52 | 
53 | 54 | ```scala 55 | import cats.data.NonEmptyList 56 | import cats.effect.Concurrent 57 | 58 | trait DataSource[F[_], Identity, Result]{ 59 | def data: Data[Identity, Result] 60 | def CF: Concurrent[F] 61 | def fetch(id: Identity): F[Option[Result]] 62 | def batch(ids: NonEmptyList[Identity]): F[Map[Identity, Result]] 63 | } 64 | ``` 65 | 66 | Returning `Concurrent` instances from the fetch methods allows us to specify if the fetch must run synchronously or asynchronously, and use all the goodies available in `cats` and `cats-effect`. 67 | 68 | We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. 69 | 70 | ```scala mdoc:silent 71 | import cats._ 72 | import cats.data.NonEmptyList 73 | import cats.effect._ 74 | import cats.implicits._ 75 | 76 | import fetch._ 77 | 78 | def latency[F[_] : Sync](milis: Long): F[Unit] = 79 | Sync[F].delay(Thread.sleep(milis)) 80 | 81 | object ToString extends Data[Int, String] { 82 | def name = "To String" 83 | 84 | def source[F[_] : Async]: DataSource[F, Int, String] = new DataSource[F, Int, String]{ 85 | override def data = ToString 86 | 87 | override def CF = Concurrent[F] 88 | 89 | override def fetch(id: Int): F[Option[String]] = for { 90 | _ <- CF.delay(println(s"--> [${Thread.currentThread.getId}] One ToString $id")) 91 | _ <- latency(100) 92 | _ <- CF.delay(println(s"<-- [${Thread.currentThread.getId}] One ToString $id")) 93 | } yield Option(id.toString) 94 | 95 | override def batch(ids: NonEmptyList[Int]): F[Map[Int, String]] = for { 96 | _ <- CF.delay(println(s"--> [${Thread.currentThread.getId}] Batch ToString $ids")) 97 | _ <- latency(100) 98 | _ <- CF.delay(println(s"<-- [${Thread.currentThread.getId}] Batch ToString $ids")) 99 | } yield ids.toList.map(i => (i, i.toString)).toMap 100 | } 101 | } 102 | 103 | def fetchString[F[_] : Async](n: Int): Fetch[F, String] = 104 | Fetch(n, 
ToString.source) 105 | ``` 106 | 107 | ## Creating a runtime 108 | 109 | Since we'll use `IO` from the `cats-effect` library to execute our fetches, we'll need an `IORuntime` for executing our `IO` instances. 110 | 111 | ```scala mdoc:silent 112 | import cats.effect.unsafe.implicits.global //Gives us an IORuntime in places it is normally not provided 113 | ``` 114 | 115 | Normally, in your applications, this is provided by `IOApp`, and you should not need to import this except in limited scenarios such as test environments that do not have Cats Effect integration. 116 | For more information, and particularly on why you would usually not want to make one of these yourself, [see this post by Daniel Spiewak](https://github.com/typelevel/cats-effect/discussions/1562#discussioncomment-254838) 117 | 118 | ## Creating and running a fetch 119 | 120 | Now that we can convert `Int` values to `Fetch[F, String]`, let's try creating a fetch. 121 | 122 | ```scala mdoc:silent 123 | def fetchOne[F[_] : Async]: Fetch[F, String] = 124 | fetchString(1) 125 | ``` 126 | 127 | Let's run it and wait for the fetch to complete. We'll use `IO#unsafeRunTimed` for testing purposes, which will run an `IO[A]` to `Option[A]` and return `None` if it didn't complete in time: 128 | 129 | ```scala mdoc 130 | import scala.concurrent.duration._ 131 | 132 | Fetch.run[IO](fetchOne).unsafeRunTimed(5.seconds) 133 | ``` 134 | 135 | As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. 136 | 137 | ## Batching 138 | 139 | Multiple fetches to the same data source are automatically batched. For illustrating this, we are going to compose three independent fetch results as a tuple. 
140 | 141 | ```scala mdoc:silent 142 | def fetchThree[F[_] : Async]: Fetch[F, (String, String, String)] = 143 | (fetchString(1), fetchString(2), fetchString(3)).tupled 144 | ``` 145 | 146 | When executing the above fetch, note how the three identities get batched, and the data source is only queried once. 147 | 148 | ```scala mdoc 149 | Fetch.run[IO](fetchThree).unsafeRunTimed(5.seconds) 150 | ``` 151 | 152 | Note that the `DataSource#batch` method is not mandatory. It will be implemented in terms of `DataSource#fetch` if you don't provide an implementation. 153 | 154 | ```scala mdoc:silent 155 | object UnbatchedToString extends Data[Int, String] { 156 | def name = "Unbatched to string" 157 | 158 | def source[F[_]: Async] = new DataSource[F, Int, String] { 159 | override def data = UnbatchedToString 160 | 161 | override def CF = Concurrent[F] 162 | 163 | override def fetch(id: Int): F[Option[String]] = 164 | CF.delay(println(s"--> [${Thread.currentThread.getId}] One UnbatchedToString $id")) >> 165 | latency(100) >> 166 | CF.delay(println(s"<-- [${Thread.currentThread.getId}] One UnbatchedToString $id")) >> 167 | CF.pure(Option(id.toString)) 168 | } 169 | } 170 | 171 | def unbatchedString[F[_]: Async](n: Int): Fetch[F, String] = 172 | Fetch(n, UnbatchedToString.source) 173 | ``` 174 | 175 | Let's create a tuple of unbatched string requests. 176 | 177 | ```scala mdoc:silent 178 | def fetchUnbatchedThree[F[_] : Async]: Fetch[F, (String, String, String)] = 179 | (unbatchedString(1), unbatchedString(2), unbatchedString(3)).tupled 180 | ``` 181 | 182 | When executing the above fetch, note how the three identities get requested in parallel. You can override `batch` to execute queries sequentially if you need to. 183 | 184 | ```scala mdoc 185 | Fetch.run[IO](fetchUnbatchedThree).unsafeRunTimed(5.seconds) 186 | ``` 187 | 188 | ## Parallelism 189 | 190 | If we combine two independent fetches from different data sources, the fetches can be run in parallel. 
First, let's add a data source that fetches a string's size. 191 | 192 | ```scala mdoc:silent 193 | object Length extends Data[String, Int] { 194 | def name = "Length" 195 | 196 | def source[F[_] : Async] = new DataSource[F, String, Int] { 197 | override def data = Length 198 | 199 | override def CF = Concurrent[F] 200 | 201 | override def fetch(id: String): F[Option[Int]] = for { 202 | _ <- CF.delay(println(s"--> [${Thread.currentThread.getId}] One Length $id")) 203 | _ <- latency(100) 204 | _ <- CF.delay(println(s"<-- [${Thread.currentThread.getId}] One Length $id")) 205 | } yield Option(id.size) 206 | 207 | override def batch(ids: NonEmptyList[String]): F[Map[String, Int]] = for { 208 | _ <- CF.delay(println(s"--> [${Thread.currentThread.getId}] Batch Length $ids")) 209 | _ <- latency(100) 210 | _ <- CF.delay(println(s"<-- [${Thread.currentThread.getId}] Batch Length $ids")) 211 | } yield ids.toList.map(i => (i, i.size)).toMap 212 | } 213 | } 214 | 215 | def fetchLength[F[_] : Async](s: String): Fetch[F, Int] = 216 | Fetch(s, Length.source) 217 | ``` 218 | 219 | And now we can easily receive data from the two sources in a single fetch. 220 | 221 | ```scala mdoc:silent 222 | def fetchMulti[F[_] : Async]: Fetch[F, (String, Int)] = 223 | (fetchString(1), fetchLength("one")).tupled 224 | ``` 225 | 226 | Note how the two independent data fetches run in parallel, minimizing the latency cost of querying the two data sources. 227 | 228 | ```scala mdoc 229 | Fetch.run[IO](fetchMulti).unsafeRunTimed(5.seconds) 230 | ``` 231 | 232 | ## Deduplication & Caching 233 | 234 | The Fetch library supports deduplication and optional caching. 235 | By default, fetches that are chained together will share the same cache backend, providing some deduplication. 236 | 237 | When fetching an identity twice within the same `Fetch`, such as a batch of fetches or when you `flatMap` one fetch into another, subsequent fetches for the same identity are cached. 
238 | Let's try creating a fetch that asks for the same identity twice, by using `flatMap` (in a for-comprehension) to chain the requests together: 239 | 240 | ```scala mdoc:silent 241 | def fetchTwice[F[_] : Async]: Fetch[F, (String, String)] = for { 242 | one <- fetchString(1) 243 | two <- fetchString(1) 244 | } yield (one, two) 245 | ``` 246 | 247 | While running it, notice that the data source is only queried once. 248 | The next time the identity is requested, it's served from the internal cache. 249 | 250 | ```scala mdoc:silent 251 | val runFetchTwice = Fetch.run[IO](fetchTwice) 252 | ``` 253 | ```scala mdoc 254 | runFetchTwice.unsafeRunTimed(5.seconds) 255 | ``` 256 | 257 | This will still fetch the data again, however, if we call it once more: 258 | ```scala mdoc 259 | runFetchTwice.unsafeRunTimed(5.seconds) 260 | ``` 261 | 262 | If we want to cache between multiple individual fetches, you should use `Fetch.runCache` or `Fetch.runAll` to return the cache for reusing later. 263 | Here is an example where we fetch four separate times, and explicitly share the cache to keep the deduplication functionality: 264 | 265 | ```scala mdoc:silent 266 | //We get the cache from the first run and pass it to all subsequent fetches 267 | val runFetchFourTimesSharedCache = for { 268 | (cache, one) <- Fetch.runCache[IO](fetchString(1)) 269 | two <- Fetch.run[IO](fetchString(1), cache) 270 | three <- Fetch.run[IO](fetchString(1), cache) 271 | four <- Fetch.run[IO](fetchString(1), cache) 272 | } yield (one, two, three, four) 273 | ``` 274 | ```scala mdoc 275 | runFetchFourTimesSharedCache.unsafeRunTimed(5.seconds) 276 | ``` 277 | 278 | As you can see above, the cache will now work between calls and can be used to deduplicate requests over a period of time. 279 | Note that this does not support any kind of automatic cache invalidation, so you will need to keep track of which values you want to re-fetch if you plan on sharing the cache. 
280 | --- 281 | 282 | For more in-depth information, take a look at our [documentation](https://xebia-functional.github.io/fetch/docs.html). 283 | 284 | # Copyright 285 | 286 | Fetch is designed and developed by 47 Degrees 287 | 288 | Copyright (C) @YEAR_RANGE@ 47 Degrees. 289 | -------------------------------------------------------------------------------- /fetch-debug/src/main/scala/debug.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package fetch 18 | 19 | import cats.instances.all._ 20 | import cats.syntax.all._ 21 | 22 | object debug { 23 | import fetch.document.Document 24 | 25 | def string(doc: Document): String = { 26 | val writer = new java.io.StringWriter 27 | doc.format(1, writer) 28 | writer.toString 29 | } 30 | 31 | def pile(docs: Seq[Document]): Document = 32 | docs.foldLeft(Document.empty: Document)(_ :/: _) 33 | 34 | def showDuration(millis: Long): Document = { 35 | val secs = millis / 1e3 36 | Document.text(f" 🕛 $secs%1.2f seconds") 37 | } 38 | 39 | def firstRequest(r: Round): Option[Long] = 40 | for { 41 | aQuery <- r.queries.headOption 42 | firstR = r.queries.foldLeft(aQuery.start) { case (acc, q) => 43 | acc min q.start 44 | } 45 | } yield firstR 46 | 47 | def lastRequest(r: Round): Option[Long] = 48 | for { 49 | aQuery <- r.queries.headOption 50 | lastR = r.queries.foldLeft(aQuery.end) { case (acc, q) => 51 | acc max q.end 52 | } 53 | } yield lastR 54 | 55 | def showLog(log: Log): Document = 56 | log.rounds match { 57 | case Nil => Document.empty 58 | case _ => 59 | val duration: Option[Long] = for { 60 | firstRound <- log.rounds.headOption 61 | firstRequestStart <- firstRequest(firstRound) 62 | lastRound <- log.rounds.lastOption 63 | lastRequestEnd <- lastRequest(lastRound) 64 | } yield lastRequestEnd - firstRequestStart 65 | val durationDoc = 66 | duration.fold(Document.empty: Document)((d: Long) => 67 | Document.text("Fetch execution") :-: showDuration(d) 68 | ) 69 | 70 | durationDoc :/: Document.nest( 71 | 2, 72 | pile(log.rounds.mapWithIndex((r, i) => showRound(r, i + 1))) 73 | ) 74 | } 75 | 76 | def showRound(r: Round, n: Int): Document = { 77 | val roundDuration = for { 78 | f <- firstRequest(r) 79 | l <- lastRequest(r) 80 | } yield l - f 81 | 82 | val round = 83 | Document.text(s"[Round $n]") :-: roundDuration.fold(Document.text(""))(showDuration(_)) 84 | 85 | round :-: Document.nest( 86 | 2, 87 | pile(r.queries.map(showRequest)) 88 | ) 89 | } 90 | 
91 | def showRequest(r: Request): Document = 92 | r.request match { 93 | case FetchOne(id, d, cached) => 94 | Document.text(s"[Fetch one] From `${d.name}` with id $id cached $cached") :-: 95 | showDuration(r.duration) 96 | case Batch(ids, d, cached) => 97 | Document.text(s"[Batch] From `${d.name}` with ids ${ids.toList} cached $cached") :-: 98 | showDuration(r.duration) 99 | } 100 | 101 | def showMissing(d: Data[_, _], ids: List[_]): Document = 102 | Document.text(s"`${d.name}` missing identities $ids") 103 | 104 | def showRoundCount(err: FetchException): Document = 105 | Document.text(s", fetch interrupted after ${err.log.rounds.size} rounds") 106 | 107 | def showException(err: FetchException): Document = 108 | err match { 109 | case MissingIdentity(id, q, _) => /* log binding unused: showRoundCount reads err.log */ 110 | Document 111 | .text( 112 | s"[ERROR] Identity with id `$id` for data source `${q.data.name}` not found" 113 | ) :-: showRoundCount( 114 | err 115 | ) 116 | case UnhandledException(exc, _) => 117 | Document 118 | .text( 119 | s"[ERROR] Unhandled `${exc.getClass.getName}`: '${exc.getMessage}'" 120 | ) :-: showRoundCount( 121 | err 122 | ) 123 | } 124 | 125 | /* Given a [[fetch.Log]], describe it with a human-readable string. */ 126 | def describe(log: Log): String = 127 | string(showLog(log)) 128 | 129 | /* Given a [[Throwable]], describe it with a human-readable string.
*/ 130 | def describe(err: Throwable): String = 131 | err match { 132 | case fe: FetchException => 133 | string( 134 | showException(fe) :/: 135 | Document.nest(2, showLog(fe.log)) 136 | ) 137 | case _ => string(Document.text("Unexpected exception")) 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /fetch-debug/src/main/scala/document.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package fetch.document 18 | 19 | import java.io.Writer 20 | 21 | case object DocNil extends Document 22 | case object DocBreak extends Document 23 | case class DocText(txt: String) extends Document 24 | case class DocGroup(doc: Document) extends Document 25 | case class DocNest(indent: Int, doc: Document) extends Document 26 | case class DocCons(hd: Document, tl: Document) extends Document 27 | 28 | /** 29 | * A basic pretty-printing library, based on Lindig's strict version of Wadler's adaptation of 30 | * Hughes' pretty-printer. 
31 | * 32 | * @author 33 | * Michel Schinz 34 | * @version 1.0 35 | */ 36 | abstract class Document { 37 | def :-:(hd: Document): Document = DocCons(hd, this) 38 | def :-:(hd: String): Document = DocCons(DocText(hd), this) 39 | def :/:(hd: Document): Document = hd :-: DocBreak :-: this 40 | def :/:(hd: String): Document = hd :-: DocBreak :-: this 41 | 42 | /** 43 | * Format this document on `writer` and try to set line breaks so that the result fits in `width` 44 | * columns. 45 | */ 46 | def format(width: Int, writer: Writer): Unit = { 47 | type FmtState = (Int, Boolean, Document) 48 | 49 | def fits(w: Int, state: List[FmtState]): Boolean = 50 | state match { 51 | case _ if w < 0 => 52 | false 53 | case List() => 54 | true 55 | case (_, _, DocNil) :: z => 56 | fits(w, z) 57 | case (i, b, DocCons(h, t)) :: z => 58 | fits(w, (i, b, h) :: (i, b, t) :: z) 59 | case (_, _, DocText(t)) :: z => 60 | fits(w - t.length(), z) 61 | case (i, b, DocNest(ii, d)) :: z => 62 | fits(w, (i + ii, b, d) :: z) 63 | case (_, false, DocBreak) :: z => 64 | fits(w - 1, z) 65 | case (_, true, DocBreak) :: z => 66 | true 67 | case (i, _, DocGroup(d)) :: z => 68 | fits(w, (i, false, d) :: z) 69 | } 70 | 71 | def spaces(n: Int): Unit = { 72 | var rem = n 73 | while (rem >= 16) { writer write " "; rem -= 16 } 74 | if (rem >= 8) { writer write " "; rem -= 8 } 75 | if (rem >= 4) { writer write " "; rem -= 4 } 76 | if (rem >= 2) { writer write " "; rem -= 2 } 77 | if (rem == 1) writer write " " 78 | } 79 | 80 | def fmt(k: Int, state: List[FmtState]): Unit = 81 | state match { 82 | case List() => () 83 | case (_, _, DocNil) :: z => 84 | fmt(k, z) 85 | case (i, b, DocCons(h, t)) :: z => 86 | fmt(k, (i, b, h) :: (i, b, t) :: z) 87 | case (i, _, DocText(t)) :: z => 88 | writer write t 89 | fmt(k + t.length(), z) 90 | case (i, b, DocNest(ii, d)) :: z => 91 | fmt(k, (i + ii, b, d) :: z) 92 | case (i, true, DocBreak) :: z => 93 | writer write "\n" 94 | spaces(i) 95 | fmt(i, z) 96 | case (i, false, 
DocBreak) :: z => 97 | writer write " " 98 | fmt(k + 1, z) 99 | case (i, b, DocGroup(d)) :: z => 100 | val fitsFlat = fits(width - k, (i, false, d) :: z) 101 | fmt(k, (i, !fitsFlat, d) :: z) 102 | case _ => 103 | () 104 | } 105 | 106 | fmt(0, (0, false, DocGroup(this)) :: Nil) 107 | } 108 | } 109 | 110 | object Document { 111 | 112 | /** 113 | * The empty document 114 | */ 115 | def empty = DocNil 116 | 117 | /** 118 | * A break, which will either be turned into a space or a line break 119 | */ 120 | def break = DocBreak 121 | 122 | /** 123 | * A document consisting of some text literal 124 | */ 125 | def text(s: String): Document = DocText(s) 126 | 127 | /** 128 | * A group, whose components will either be printed with all breaks rendered as spaces, or with 129 | * all breaks rendered as line breaks. 130 | */ 131 | def group(d: Document): Document = DocGroup(d) 132 | 133 | /** 134 | * A nested document, which will be indented as specified. 135 | */ 136 | def nest(i: Int, d: Document): Document = DocNest(i, d) 137 | } 138 | -------------------------------------------------------------------------------- /fetch-examples/src/test/resources/simplelogger.properties: -------------------------------------------------------------------------------- 1 | org.slf4j.simpleLogger.log.org.http4s.blaze.client.PoolManager=warn -------------------------------------------------------------------------------- /fetch-examples/src/test/scala/DoobieExample.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import cats.data.NonEmptyList 18 | import cats.effect._ 19 | import cats.instances.list._ 20 | import cats.syntax.all._ 21 | 22 | import doobie.{Query => _, _} 23 | import doobie.h2.H2Transactor 24 | import doobie.util.ExecutionContexts 25 | 26 | import org.scalatest._ 27 | import org.scalatest.matchers.should.Matchers 28 | import org.scalatest.wordspec.AnyWordSpec 29 | 30 | import scala.concurrent.ExecutionContext 31 | import java.util.concurrent.Executors 32 | 33 | import fetch._ 34 | import fetch.syntax._ 35 | 36 | object DatabaseExample { 37 | case class AuthorId(id: Int) 38 | case class Author(id: Int, name: String) 39 | 40 | object Queries { 41 | 42 | import doobie.implicits._ 43 | 44 | implicit val authorIdMeta: Meta[AuthorId] = 45 | Meta[Int].imap(AuthorId(_))(_.id) 46 | 47 | def fetchById(id: AuthorId): ConnectionIO[Option[Author]] = 48 | sql"SELECT * FROM author WHERE id = $id".query[Author].option 49 | 50 | def fetchByIds(ids: NonEmptyList[AuthorId]): ConnectionIO[List[Author]] = { 51 | val q = fr"SELECT * FROM author WHERE" ++ Fragments.in(fr"id", ids) 52 | q.query[Author].to[List] 53 | } 54 | } 55 | 56 | object Database { 57 | def connectionPool[F[_]: Sync](n: Int): Resource[F, ExecutionContext] = 58 | ExecutionContexts.fixedThreadPool[F](n) 59 | 60 | def transactionPool[F[_]: Sync]: Resource[F, ExecutionContext] = 61 | ExecutionContexts.cachedThreadPool 62 | 63 | import doobie.implicits._ 64 | 65 | def createTable[F[_]: Sync](tx: Transactor[F]) = sql""" 66 | CREATE TABLE author ( 67 | id 
INTEGER PRIMARY KEY, 68 | name VARCHAR(20) NOT NULL UNIQUE 69 | ) 70 | """.update.run.transact(tx) 71 | 72 | def dropTable[F[_]: Sync](tx: Transactor[F]) = 73 | sql"DROP TABLE IF EXISTS author".update.run.transact(tx) 74 | 75 | def addAuthor[F[_]: Sync](author: Author)(tx: Transactor[F]) = 76 | sql"INSERT INTO author (id, name) VALUES(${author.id}, ${author.name})".update.run 77 | .transact(tx) 78 | 79 | val authors: List[Author] = 80 | List("William Shakespeare", "Charles Dickens", "George Orwell").zipWithIndex.map { 81 | case (name, id) => Author(id + 1, name) 82 | } 83 | 84 | def createTransactor[F[_]: Async] = 85 | for { 86 | conn <- connectionPool[F](1) 87 | tx <- 88 | H2Transactor 89 | .newH2Transactor[F]( 90 | "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1", 91 | "sa", 92 | "", 93 | conn 94 | ) 95 | } yield tx 96 | } 97 | 98 | object Authors extends Data[AuthorId, Author] { 99 | 100 | import doobie.implicits._ 101 | def name = "Authors" 102 | 103 | def db[F[_]: Async]: DataSource[F, AuthorId, Author] = 104 | new DataSource[F, AuthorId, Author] { 105 | def data = Authors 106 | 107 | override def CF = Concurrent[F] 108 | 109 | override def fetch(id: AuthorId): F[Option[Author]] = 110 | Database 111 | .createTransactor[F] 112 | .use(Queries.fetchById(id).transact(_)) 113 | 114 | override def batch(ids: NonEmptyList[AuthorId]): F[Map[AuthorId, Author]] = 115 | Database 116 | .createTransactor[F] 117 | .use(Queries.fetchByIds(ids).transact(_)) 118 | .map(authors => authors.map(a => AuthorId(a.id) -> a).toMap) 119 | } 120 | 121 | def fetchAuthor[F[_]: Async](id: Int): Fetch[F, Author] = 122 | Fetch(AuthorId(id), Authors.db) 123 | } 124 | } 125 | 126 | class DoobieExample extends AnyWordSpec with Matchers with BeforeAndAfterAll { 127 | import DatabaseExample._ 128 | import Database._ 129 | 130 | val executionContext = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(2)) 131 | implicit val ioRuntime: unsafe.IORuntime = unsafe.IORuntime.global 132 | implicit val 
transactor: Resource[IO, Transactor[IO]] = createTransactor[IO] 133 | 134 | override def beforeAll(): Unit = 135 | (transactor 136 | .use { tx => 137 | createTable(tx) *> authors.traverse(addAuthor(_)(tx)) 138 | }) 139 | .void 140 | .unsafeRunSync() 141 | override def afterAll(): Unit = transactor.use(dropTable(_)).void.unsafeRunSync() 142 | 143 | "We can fetch one author from the DB" in { 144 | val io: IO[(Log, Author)] = Fetch.runLog[IO](Authors.fetchAuthor(1)) 145 | 146 | val (log, result) = io.unsafeRunSync() 147 | 148 | result shouldEqual Author(1, "William Shakespeare") 149 | log.rounds.size shouldEqual 1 150 | } 151 | 152 | "We can fetch multiple authors from the DB in parallel" in { 153 | def fetch[F[_]: Async]: Fetch[F, List[Author]] = 154 | List(1, 2).map(Authors.fetchAuthor[F]).batchAll 155 | 156 | val io: IO[(Log, List[Author])] = Fetch.runLog[IO](fetch) 157 | 158 | val (log, result) = io.unsafeRunSync() 159 | 160 | result shouldEqual Author(1, "William Shakespeare") :: Author(2, "Charles Dickens") :: Nil 161 | log.rounds.size shouldEqual 1 162 | } 163 | 164 | "We can fetch multiple authors from the DB using a for comprehension" in { 165 | def fetch[F[_]: Async]: Fetch[F, List[Author]] = 166 | for { 167 | a <- Authors.fetchAuthor(1) 168 | b <- Authors.fetchAuthor(a.id + 1) 169 | } yield List(a, b) 170 | 171 | val io: IO[(Log, List[Author])] = Fetch.runLog[IO](fetch) 172 | 173 | val (log, result) = io.unsafeRunSync() 174 | 175 | result shouldEqual Author(1, "William Shakespeare") :: Author(2, "Charles Dickens") :: Nil 176 | log.rounds.size shouldEqual 2 177 | } 178 | 179 | } 180 | -------------------------------------------------------------------------------- /fetch-examples/src/test/scala/GithubExample.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file 
except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import cats.effect._ 18 | import cats.syntax.all._ 19 | import fetch.{Data, DataSource, Fetch} 20 | import fetch.syntax._ 21 | import io.circe._ 22 | import io.circe.generic.semiauto._ 23 | import org.http4s._ 24 | import org.http4s.blaze.client._ 25 | import org.http4s.circe._ 26 | import org.http4s.client._ 27 | import org.scalatest.matchers.should.Matchers 28 | import org.scalatest.wordspec.AnyWordSpec 29 | import org.typelevel.ci.CIString 30 | import org.typelevel.log4cats.Logger 31 | import org.typelevel.log4cats.slf4j.Slf4jLogger 32 | 33 | import java.io.{BufferedReader, BufferedWriter, File, FileReader, FileWriter} 34 | 35 | class GithubExample extends AnyWordSpec with Matchers { 36 | implicit val ioRuntime: unsafe.IORuntime = unsafe.IORuntime.global 37 | 38 | val ACCESS_TOKEN: String = sys.env("GITHUB_TOKEN") 39 | val CACHE_FOLDER: String = sys.env.getOrElse("GITHUB_RESPONSES", "target/github-responses") 40 | 41 | def cleanUpCache: IO[Unit] = IO { 42 | val folder = new File(CACHE_FOLDER) 43 | if (folder.isDirectory) folder.listFiles().toList else Nil 44 | }.flatMap(_.traverse_(f => IO.blocking(f.delete()).attempt.void)) 45 | 46 | // http4s client which is used by the datasources 47 | 48 | object FileMiddleware { 49 | 50 | def file(uri: Uri): File = 51 | new File(CACHE_FOLDER + "/" + uri.path.toString.replaceAll("/", "_") + ".json") 52 | 53 | def fileWriter[F[_]: Async](file: File): Resource[F, BufferedWriter] = 54 | 
Resource.fromAutoCloseable(Async[F].blocking(new BufferedWriter(new FileWriter(file)))) 55 | 56 | def fileReader[F[_]: Async](file: File): Resource[F, BufferedReader] = 57 | Resource.fromAutoCloseable(Async[F].blocking(new BufferedReader(new FileReader(file)))) 58 | 59 | def apply[F[_]: Async]: Client[F] => Client[F] = { client => 60 | val logger: Logger[F] = Slf4jLogger.getLogger[F] 61 | Client[F] { request => 62 | val cacheFile = file(request.uri) 63 | for { 64 | _ <- Resource.eval(Async[F].catchNonFatal(cacheFile.getParentFile.mkdirs())) 65 | json <- 66 | if (cacheFile.exists()) { 67 | fileReader(cacheFile) 68 | .evalMap(br => 69 | Async[F].delay( 70 | Iterator.continually(br.readLine()).takeWhile(_ != null).mkString("\n") /* Iterator replaces deprecated scala.Stream and avoids memoizing every line */ 71 | ) 72 | ) 73 | .evalMap(s => Async[F].fromEither(io.circe.parser.parse(s))) 74 | .evalTap(_ => logger.info(s"Got JSON for uri ${request.uri} from cache")) 75 | } else { 76 | client 77 | .run(request) 78 | .evalMap(_.as[Json]) 79 | .flatTap(json => 80 | fileWriter(cacheFile).evalMap(fw => Async[F].blocking(fw.write(json.spaces4))) 81 | ) 82 | .evalTap(_ => logger.info(s"Request to ${request.uri}")) 83 | } 84 | } yield Response[F](Status.Ok).withEntity[Json](json) 85 | } 86 | } 87 | } 88 | 89 | def client[F[_]: Async]: Resource[F, Client[F]] = 90 | BlazeClientBuilder[F].resource.map(FileMiddleware.apply) 91 | 92 | // -- repos 93 | 94 | type Org = String 95 | case class Repo( 96 | name: String, 97 | fork: Boolean, 98 | forks_count: Int, 99 | stargazers_count: Int, 100 | watchers_count: Int, 101 | languages_url: String, 102 | contributors_url: String 103 | ) 104 | 105 | object OrgRepos extends Data[Org, List[Repo]] { 106 | def name = "Org repositories" 107 | 108 | implicit val repoD: Decoder[Repo] = deriveDecoder 109 | 110 | def source[F[_]: Async]: DataSource[F, Org, List[Repo]] = 111 | new DataSource[F, Org, List[Repo]] { 112 | implicit val reposED: EntityDecoder[F, List[Repo]] = jsonOf 113 | 114 | def CF: Concurrent[F] = Concurrent[F] 115 |
116 | def data: Data[Org, List[Repo]] = OrgRepos 117 | 118 | def fetch(org: Org): F[Option[List[Repo]]] = { 119 | client[F].use { c => 120 | val url = 121 | GITHUB / "orgs" / org / "repos" +? ("type", "public") +? ("per_page", 100) 122 | val req = Request[F](Method.GET, url).withHeaders( 123 | Header.Raw(CIString("Authorization"), s"token $ACCESS_TOKEN") 124 | ) 125 | fetchCollectionRecursively[F, Repo](c, req).map(Option(_)) 126 | } 127 | } 128 | } 129 | } 130 | 131 | def orgRepos[F[_]: Async](org: Org): Fetch[F, List[Repo]] = 132 | Fetch(org, OrgRepos.source) 133 | 134 | // -- languages 135 | 136 | type Language = String 137 | 138 | object Languages extends Data[Repo, List[Language]] { 139 | def name = "Languages" 140 | 141 | def source[F[_]: Async]: DataSource[F, Repo, List[Language]] = 142 | new DataSource[F, Repo, List[Language]] { 143 | implicit val langD: Decoder[List[Language]] = Decoder[JsonObject].map( 144 | _.toList.map(_._1) 145 | ) 146 | implicit val langED: EntityDecoder[F, List[Language]] = jsonOf 147 | 148 | def CF: Concurrent[F] = Concurrent[F] 149 | 150 | def data: Data[Repo, List[Language]] = Languages 151 | 152 | def fetch(repo: Repo): F[Option[List[Language]]] = { 153 | client[F].use { c => 154 | val url = Uri.unsafeFromString(repo.languages_url) 155 | val req = Request[F](Method.GET, url).withHeaders( 156 | Header.Raw(CIString("Authorization"), s"token $ACCESS_TOKEN") 157 | ) 158 | fetchCollectionRecursively[F, Language](c, req).map(Option(_)) 159 | } 160 | } 161 | } 162 | } 163 | 164 | def repoLanguages[F[_]: Async](repo: Repo): Fetch[F, List[Language]] = 165 | Fetch(repo, Languages.source) 166 | 167 | // -- contributors 168 | 169 | case class Contributor(login: String, contributions: Int) 170 | 171 | object Contributors extends Data[Repo, List[Contributor]] { 172 | def name = "Contributors" 173 | 174 | def source[F[_]: Async]: DataSource[F, Repo, List[Contributor]] = 175 | new DataSource[F, Repo, List[Contributor]] { 176 | implicit val 
contribD: Decoder[Contributor] = deriveDecoder 177 | implicit val contribED: EntityDecoder[F, List[Contributor]] = jsonOf 178 | 179 | def CF: Concurrent[F] = Concurrent[F] 180 | 181 | def data: Data[Repo, List[Contributor]] = Contributors 182 | 183 | def fetch(repo: Repo): F[Option[List[Contributor]]] = { 184 | client[F].use { c => 185 | val url = Uri 186 | .unsafeFromString( 187 | repo.contributors_url 188 | ) +? ("type", "public") +? ("per_page", 100) 189 | val req = 190 | Request[F](Method.GET, url).withHeaders( 191 | Header.Raw(CIString("Authorization"), s"token $ACCESS_TOKEN") 192 | ) 193 | fetchCollectionRecursively[F, Contributor](c, req).map(Option(_)) 194 | } 195 | } 196 | } 197 | } 198 | 199 | def repoContributors[F[_]: Async](repo: Repo): Fetch[F, List[Contributor]] = 200 | Fetch(repo, Contributors.source) 201 | 202 | case class Project(repo: Repo, contributors: List[Contributor], languages: List[Language]) 203 | 204 | def fetchProject[F[_]: Async](repo: Repo): Fetch[F, Project] = 205 | (repoContributors(repo), repoLanguages(repo)).mapN { case (contribs, langs) => 206 | Project(repo = repo, contributors = contribs, languages = langs) 207 | } 208 | 209 | def fetchOrg[F[_]: Async](org: String): Fetch[F, List[Project]] = 210 | for { 211 | repos <- orgRepos(org) 212 | projects <- repos 213 | .filter(r => r.name == "fetch" || r.name == "github4s" || r.name == "memeid") 214 | .batchAllWith(fetchProject[F]) 215 | } yield projects 216 | 217 | def fetchOrgStars[F[_]: Async](org: String): Fetch[F, Int] = 218 | fetchOrg(org).map(projects => projects.map(_.repo.stargazers_count).sum) 219 | 220 | def fetchOrgContributors[F[_]: Async](org: String): Fetch[F, Int] = 221 | fetchOrg(org).map(projects => projects.map(_.contributors.toSet).fold(Set())(_ ++ _).size) 222 | 223 | def fetchOrgLanguages[F[_]: Async](org: String): Fetch[F, Int] = 224 | fetchOrg(org).map(projects => projects.map(_.languages.toSet).fold(Set())(_ ++ _).size) 225 | 226 | "We can fetch org repos" in { 
227 | val io = Fetch.runLog[IO](fetchOrg[IO]("47degrees")) 228 | 229 | val (log, _) = io.onError { case _ => cleanUpCache }.unsafeRunSync() 230 | 231 | log.rounds.size shouldEqual 2 232 | } 233 | 234 | // Github HTTP api 235 | 236 | val GITHUB: Uri = Uri.unsafeFromString("https://api.github.com") 237 | 238 | private def fetchCollectionRecursively[F[_], A](c: Client[F], req: Request[F])(implicit 239 | F: Async[F], 240 | E: EntityDecoder[F, List[A]] 241 | ): F[List[A]] = { 242 | val REL_NEXT = "rel=\"next\"".r 243 | 244 | def hasNext(res: Response[F]): Boolean = 245 | res.headers 246 | .get(CIString("Link")) 247 | .fold(false) { hs => 248 | hs.exists(h => REL_NEXT.findFirstIn(h.value).isDefined) 249 | } 250 | 251 | def getNextLink(raw: String): F[String] = { 252 | REL_NEXT 253 | .findFirstMatchIn(raw) 254 | .liftTo[F](new Exception("Couldn't find next link")) 255 | .map { m => 256 | m.before.toString.split(",").last.trim.dropWhile(_ == '<').takeWhile(_ != '>') 257 | } 258 | } 259 | 260 | def getNext(res: Response[F]): F[Uri] = 261 | res.headers 262 | .get(CIString("Link")) 263 | .fold(F.raiseError[Uri](new Exception("next not found"))) { hs => 264 | getNextLink(hs.head.value).map(Uri.unsafeFromString) 265 | } 266 | 267 | c.run(req).use[List[A]] { 268 | case Status.Ok(res) => 269 | if (hasNext(res)) { 270 | for { 271 | repos <- res.as[List[A]] 272 | nxt <- getNext(res) 273 | newReq = req.withUri(nxt) 274 | moreRepos <- fetchCollectionRecursively(c, newReq) 275 | } yield repos ++ moreRepos 276 | } else 277 | res.as[List[A]] 278 | case res => 279 | res.bodyText.compile.string.flatMap(respBody => 280 | F.raiseError( 281 | new Exception( 282 | s"Couldn't complete request, returned status: ${res.status}: Body:\n$respBody" 283 | ) 284 | ) 285 | ) 286 | } 287 | } 288 | 289 | } 290 | -------------------------------------------------------------------------------- /fetch-examples/src/test/scala/GraphQLExample.scala: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package fetch 18 | 19 | import org.scalatest.matchers.should.Matchers 20 | import org.scalatest.wordspec.AnyWordSpec 21 | 22 | import atto._, Atto._ 23 | import cats.syntax.all._ 24 | import cats.data.NonEmptyList 25 | import cats.effect._ 26 | import fetch.syntax._ 27 | import scala.concurrent.ExecutionContext 28 | import scala.concurrent.duration._ 29 | 30 | // Types 31 | case class Organization(org: String, projects: List[Project]) 32 | case class Project(name: Option[String], languages: List[String], collaborators: List[String]) 33 | case class Repo(name: String) 34 | 35 | class GraphQLExample extends AnyWordSpec with Matchers { 36 | implicit val executionContext: ExecutionContext = ExecutionContext.Implicits.global 37 | implicit val ioRuntime: unsafe.IORuntime = unsafe.IORuntime.global 38 | 39 | def countFetches(r: Request): Int = 40 | r.request match { 41 | case FetchOne(_, _, false) => 1 42 | case Batch(ids, _, false) => ids.toList.size 43 | case FetchOne(_, _, true) => 0 44 | case Batch(_, _, true) => 0 45 | } 46 | 47 | def totalFetched(rs: Seq[Round]): Int = 48 | rs.map((round: Round) => round.queries.map(countFetches).sum).toList.sum 49 | 50 | def countBatches(r: Request): Int = 51 | r.request match { 52 | case 
FetchOne(_, _, false) => 0 53 | case Batch(_, _, false) => 1 54 | case FetchOne(_, _, true) => 0 55 | case Batch(_, _, true) => 0 56 | } 57 | 58 | def totalBatches(rs: Seq[Round]): Int = 59 | rs.map((round: Round) => round.queries.map(countBatches).sum).toList.sum 60 | 61 | import Parsers._ 62 | import Sources._ 63 | 64 | val query = """ 65 | query { 66 | organization(login:"47deg") { 67 | repositories(first: 100){ 68 | name, 69 | languages, 70 | collaborators 71 | } 72 | } 73 | } 74 | """ 75 | 76 | val langsQuery = """ 77 | query { 78 | organization(login:"47deg") { 79 | repositories(first: 100){ 80 | languages 81 | } 82 | } 83 | } 84 | """ 85 | 86 | val collabsQuery = """ 87 | query { 88 | organization(login:"47deg") { 89 | repositories(first: 100){ 90 | collaborators 91 | } 92 | } 93 | } 94 | """ 95 | 96 | val orgQuery = """ 97 | query { 98 | organization(login:"47deg") { 99 | repositories(first: 100) { 100 | name 101 | } 102 | } 103 | } 104 | """ 105 | 106 | val repoQuery = """ 107 | query { 108 | organization(login:"47deg") { 109 | repositories(first: 1){ 110 | name, 111 | languages, 112 | collaborators 113 | } 114 | } 115 | } 116 | """ 117 | 118 | def runQuery[F[_]: Async](q: String): Fetch[F, Organization] = 119 | queryParser.parseOnly(q) match { 120 | case ParseResult.Done(_, query) => fetchOrg[F](query) 121 | case _ => Fetch.error(new Exception("Oh noes")) 122 | } 123 | 124 | "We can interpret queries" in { 125 | val io = Fetch.runLog[IO](runQuery(query)) 126 | val (log, result) = io.unsafeRunSync() 127 | 128 | result shouldEqual Organization( 129 | "47deg", 130 | List( 131 | Project(Some("fetch"), List("scala"), List("Peter", "Ale")), 132 | Project(Some("arrow"), List("kotlin"), List("Raul", "Paco", "Simon")) 133 | ) 134 | ) 135 | 136 | log.rounds.size shouldEqual 2 137 | totalBatches(log.rounds) shouldEqual 2 138 | } 139 | 140 | "We can interpret queries with only languages" in { 141 | val io = Fetch.runLog[IO](runQuery(langsQuery)) 142 | val (log, 
result) = io.unsafeRunSync() 143 | 144 | result shouldEqual Organization( 145 | "47deg", 146 | List(Project(None, List("scala"), List()), Project(None, List("kotlin"), List())) 147 | ) 148 | 149 | log.rounds.size shouldEqual 2 150 | totalBatches(log.rounds) shouldEqual 1 151 | } 152 | 153 | "We can interpret queries with only collaborators" in { 154 | val io = Fetch.runLog[IO](runQuery(collabsQuery)) 155 | val (log, result) = io.unsafeRunSync() 156 | result shouldEqual Organization( 157 | "47deg", 158 | List( 159 | Project(None, List(), List("Peter", "Ale")), 160 | Project(None, List(), List("Raul", "Paco", "Simon")) 161 | ) 162 | ) 163 | 164 | log.rounds.size shouldEqual 2 165 | totalBatches(log.rounds) shouldEqual 1 166 | } 167 | 168 | "We can interpret queries with no nested joins" in { 169 | val io = Fetch.runLog[IO](runQuery(orgQuery)) 170 | val (log, result) = io.unsafeRunSync() 171 | result shouldEqual Organization( 172 | "47deg", 173 | List(Project(Some("fetch"), List(), List()), Project(Some("arrow"), List(), List())) 174 | ) 175 | 176 | log.rounds.size shouldEqual 1 177 | totalBatches(log.rounds) shouldEqual 0 178 | } 179 | 180 | "We can interpret queries with a limited number of repositories" in { 181 | val io = Fetch.runLog[IO](runQuery(repoQuery)) 182 | val (log, result) = io.unsafeRunSync() 183 | 184 | result shouldEqual Organization( 185 | "47deg", 186 | List(Project(Some("fetch"), List("scala"), List("Peter", "Ale"))) 187 | ) 188 | 189 | log.rounds.size shouldEqual 2 190 | totalBatches(log.rounds) shouldEqual 0 191 | } 192 | 193 | def fetchOrg[F[_]: Async](q: OrganizationQuery): Fetch[F, Organization] = 194 | q.repos match { 195 | case None => Fetch.pure(Organization(q.org, List())) 196 | case Some(r) => fetchRepos(q.org, r).map(rs => Organization(q.org, rs)) 197 | } 198 | 199 | private def fetchRepos[F[_]: Async]( 200 | org: String, 201 | q: RepositoriesQuery 202 | ): Fetch[F, List[Project]] = 203 | q match { 204 | case RepositoriesQuery(n, name, 
Some(_), Some(_)) => 205 | for { 206 | repos <- Repos.fetch(org) 207 | projects <- { 208 | val nRepos = repos.take(n) 209 | val fetches = nRepos.map { repo => 210 | (Languages.fetch(repo), Collaborators.fetch(repo)).mapN { case (ls, cs) => 211 | Project(name >> Some(repo.name), ls, cs) 212 | } 213 | } 214 | fetches.batchAll 215 | } 216 | } yield projects 217 | 218 | case RepositoriesQuery(n, name, None, None) => 219 | Repos.fetch(org).map(_.map(r => Project(name >> Some(r.name), List(), List()))) 220 | 221 | case RepositoriesQuery(n, name, Some(_), None) => 222 | for { 223 | repos <- Repos.fetch(org) 224 | projects <- { 225 | val fetches = repos.map { r => 226 | Languages.fetch(r).map(ls => Project(name >> Some(r.name), ls, List())) 227 | } 228 | fetches.batchAll 229 | } 230 | } yield projects 231 | 232 | case RepositoriesQuery(n, name, None, Some(_)) => 233 | for { 234 | repos <- Repos.fetch(org) 235 | projects <- { 236 | val fetches = repos.map { r => 237 | Collaborators.fetch(r).map(cs => Project(name >> Some(r.name), List(), cs)) 238 | } 239 | fetches.batchAll 240 | } 241 | } yield projects 242 | } 243 | } 244 | 245 | object Parsers { 246 | def queryParser: Parser[OrganizationQuery] = 247 | rawParser.map { case (o, n) => 248 | OrganizationQuery( 249 | o, 250 | n.map { case (i, name, langs, colls) => 251 | RepositoriesQuery( 252 | i, 253 | if (name) Some(()) else None, 254 | if (langs) Some(LanguagesQuery()) else None, 255 | if (colls) Some(CollaboratorsQuery()) else None 256 | ) 257 | } 258 | ) 259 | } 260 | 261 | def rawParser: Parser[(String, Option[(Int, Boolean, Boolean, Boolean)])] = 262 | for { 263 | _ <- skipWhitespace 264 | _ <- string("query") 265 | 266 | _ <- leftBrace 267 | org <- organization 268 | 269 | _ <- leftBrace 270 | repos <- opt(repositories) 271 | 272 | _ <- rightBrace 273 | _ <- rightBrace 274 | } yield (org, repos) 275 | 276 | def leftBrace: Parser[Unit] = skipWhitespace >> char('{') >> skipWhitespace 277 | def rightBrace: Parser[Unit] = 
skipWhitespace >> char('}') >> skipWhitespace 278 | 279 | def organization: Parser[String] = 280 | string("organization") >> 281 | parens(string("login") >> char(':') >> stringLiteral) 282 | 283 | def repositories: Parser[(Int, Boolean, Boolean, Boolean)] = 284 | for { 285 | i <- string("repositories") >> parens(string("first") >> char(':') >> skipWhitespace >> int) 286 | _ <- leftBrace 287 | (name, langs, colls) <- organizationQuery 288 | _ <- rightBrace 289 | } yield (i, name, langs, colls) 290 | 291 | def organizationQuery: Parser[(Boolean, Boolean, Boolean)] = 292 | for { 293 | name <- opt(string("name")).map(!_.isEmpty) 294 | _ <- opt(char(',') >> skipWhitespace) 295 | langs <- languages 296 | _ <- opt(char(',') >> skipWhitespace) 297 | colls <- collaborators 298 | } yield (name, langs, colls) 299 | 300 | val languages: Parser[Boolean] = 301 | opt(string("languages")).map(!_.isEmpty) 302 | 303 | val collaborators: Parser[Boolean] = 304 | opt(string("collaborators")).map(!_.isEmpty) 305 | 306 | case class LanguagesQuery() 307 | case class CollaboratorsQuery() 308 | case class RepositoriesQuery( 309 | n: Int, 310 | name: Option[Unit] = None, 311 | languages: Option[LanguagesQuery] = None, 312 | collaborators: Option[CollaboratorsQuery] = None 313 | ) 314 | case class OrganizationQuery(org: String, repos: Option[RepositoriesQuery]) 315 | } 316 | 317 | object Sources { 318 | val reposDb = Map( 319 | "47deg" -> List(Repo("fetch"), Repo("arrow")) 320 | ) 321 | 322 | object Repos extends Data[String, List[Repo]] { 323 | def name = "Repos" 324 | 325 | def source[F[_]: Async]: DataSource[F, String, List[Repo]] = 326 | new DataSource[F, String, List[Repo]] { 327 | def CF = Async[F] 328 | def data = Repos 329 | 330 | def fetch(id: String): F[Option[List[Repo]]] = 331 | CF.pure(reposDb.get(id)) 332 | } 333 | 334 | def fetch[F[_]: Async](org: String): Fetch[F, List[Repo]] = 335 | Fetch(org, source) 336 | } 337 | 338 | val langsDb = Map( 339 | Repo("fetch") -> 
List("scala"), 340 | Repo("arrow") -> List("kotlin") 341 | ) 342 | 343 | object Languages extends Data[Repo, List[String]] { 344 | def name = "Languages" 345 | 346 | def source[F[_]: Async]: DataSource[F, Repo, List[String]] = 347 | new DataSource[F, Repo, List[String]] { 348 | def CF = Async[F] 349 | def data = Languages 350 | 351 | def fetch(id: Repo): F[Option[List[String]]] = 352 | CF.pure(langsDb.get(id)) 353 | } 354 | 355 | def fetch[F[_]: Async](repo: Repo): Fetch[F, List[String]] = 356 | Fetch(repo, source) 357 | } 358 | 359 | val collabsDb = Map( 360 | Repo("fetch") -> List("Peter", "Ale"), 361 | Repo("arrow") -> List("Raul", "Paco", "Simon") 362 | ) 363 | 364 | object Collaborators extends Data[Repo, List[String]] { 365 | def name = "Collaborators" 366 | 367 | def source[F[_]: Async]: DataSource[F, Repo, List[String]] = 368 | new DataSource[F, Repo, List[String]] { 369 | def CF = Async[F] 370 | def data = Collaborators 371 | 372 | def fetch(id: Repo): F[Option[List[String]]] = 373 | CF.pure(collabsDb.get(id)) 374 | } 375 | 376 | def fetch[F[_]: Async](repo: Repo): Fetch[F, List[String]] = 377 | Fetch(repo, source) 378 | } 379 | } 380 | -------------------------------------------------------------------------------- /fetch-examples/src/test/scala/Http4sExample.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import scala.concurrent.ExecutionContext 18 | import scala.concurrent.duration._ 19 | 20 | import cats.data.NonEmptyList 21 | import cats.effect._ 22 | import cats.instances.list._ 23 | import cats.syntax.all._ 24 | 25 | import io.circe._ 26 | import io.circe.generic.semiauto._ 27 | 28 | import org.http4s.client.Client 29 | import org.http4s.circe._ 30 | import org.http4s.blaze.client._ 31 | import org.scalatest.matchers.should.Matchers 32 | import org.scalatest.wordspec.AnyWordSpec 33 | 34 | import java.util.concurrent._ 35 | 36 | import fetch._ 37 | import fetch.syntax._ 38 | 39 | object HttpExample { 40 | case class UserId(id: Int) 41 | case class PostId(id: Int) 42 | 43 | case class User(id: UserId, name: String, username: String, email: String) 44 | case class Post(id: PostId, userId: UserId, title: String, body: String) 45 | 46 | object Http { 47 | val executionContext = 48 | ExecutionContext.fromExecutor(new ScheduledThreadPoolExecutor(2)) 49 | 50 | def client[F[_]: Async]: Resource[F, Client[F]] = 51 | BlazeClientBuilder[F](executionContext).resource 52 | 53 | implicit val userIdDecoder: Decoder[UserId] = Decoder[Int].map(UserId.apply) 54 | implicit val postIdDecoder: Decoder[PostId] = Decoder[Int].map(PostId.apply) 55 | implicit val userDecoder: Decoder[User] = deriveDecoder 56 | implicit val postDecoder: Decoder[Post] = deriveDecoder 57 | } 58 | 59 | object Users extends Data[UserId, User] { 60 | import Http._ 61 | 62 | def name = "Users" 63 | 64 | def http[F[_]: Async]: DataSource[F, UserId, User] = 65 | new DataSource[F, UserId, User] { 66 | def data = Users 67 | 68 | override def CF = Async[F] 69 | 70 | override def fetch(id: UserId): F[Option[User]] = { 71 | val url = s"https://jsonplaceholder.typicode.com/users?id=${id.id}" 72 | client[F].use((c) => c.expect(url)(jsonOf[F, List[User]])).map(_.headOption) 73 | } 74 | 75 | 
override def batch(ids: NonEmptyList[UserId]): F[Map[UserId, User]] = { 76 | val filterIds = ids.map("id=" + _.id).toList.mkString("&") 77 | val url = s"https://jsonplaceholder.typicode.com/users?$filterIds" 78 | val io = client[F].use((c) => c.expect(url)(jsonOf[F, List[User]])) 79 | io.map(users => users.map(user => user.id -> user).toMap) 80 | } 81 | } 82 | } 83 | 84 | object Posts extends Data[UserId, List[Post]] { 85 | import Http._ 86 | 87 | def name = "Posts" 88 | 89 | def http[F[_]: Async]: DataSource[F, UserId, List[Post]] = 90 | new DataSource[F, UserId, List[Post]] { 91 | def data = Posts 92 | 93 | override def CF = Async[F] 94 | 95 | override def fetch(id: UserId): F[Option[List[Post]]] = { 96 | val url = s"https://jsonplaceholder.typicode.com/posts?userId=${id.id}" 97 | client[F].use((c) => c.expect(url)(jsonOf[F, List[Post]])).map(Option.apply) 98 | } 99 | 100 | override def batch(ids: NonEmptyList[UserId]): F[Map[UserId, List[Post]]] = { 101 | val filterIds = ids.map("userId=" + _.id).toList.mkString("&") 102 | val url = s"https://jsonplaceholder.typicode.com/posts?$filterIds" 103 | client[F].use((c) => c.expect(url)(jsonOf[F, List[Post]])).map(_.groupBy(_.userId).toMap) 104 | } 105 | } 106 | } 107 | 108 | def fetchUserById[F[_]: Async](id: UserId): Fetch[F, User] = 109 | Fetch(id, Users.http) 110 | 111 | def fetchPostsForUser[F[_]: Async](id: UserId): Fetch[F, List[Post]] = 112 | Fetch(id, Posts.http) 113 | 114 | def fetchUser[F[_]: Async](id: Int): Fetch[F, User] = 115 | fetchUserById(UserId(id)) 116 | 117 | def fetchManyUsers[F[_]: Async](ids: List[Int]): Fetch[F, List[User]] = 118 | ids.map(i => fetchUserById(UserId(i))).batchAll 119 | 120 | def fetchPosts[F[_]: Async](user: User): Fetch[F, (User, List[Post])] = 121 | fetchPostsForUser(user.id).map(posts => (user, posts)) 122 | } 123 | 124 | class Http4sExample extends AnyWordSpec with Matchers { 125 | import HttpExample._ 126 | 127 | // runtime 128 | val executionContext = 
ExecutionContext.global 129 | implicit val ioRuntime: unsafe.IORuntime = unsafe.IORuntime.global 130 | 131 | "We can fetch one user" in { 132 | val io: IO[(Log, User)] = Fetch.runLog[IO](fetchUser(1)) 133 | 134 | val (log, result) = io.unsafeRunSync() 135 | 136 | println(result) 137 | log.rounds.size shouldEqual 1 138 | } 139 | 140 | "We can fetch multiple users in parallel" in { 141 | val io = Fetch.runLog[IO](fetchManyUsers(List(1, 2, 3))) 142 | 143 | val (log, result) = io.unsafeRunSync() 144 | 145 | result.foreach(println) 146 | log.rounds.size shouldEqual 1 147 | } 148 | 149 | "We can fetch multiple users with their posts" in { 150 | def fetch[F[_]: Async]: Fetch[F, List[(User, List[Post])]] = 151 | for { 152 | users <- fetchManyUsers(List(1, 2)) 153 | usersWithPosts <- users.map(fetchPosts[F]).batchAll 154 | } yield usersWithPosts 155 | 156 | val io = Fetch.runLog[IO](fetch) 157 | 158 | val (log, results) = io.unsafeRunSync() 159 | 160 | results 161 | .map { case (user, posts) => 162 | s"${user.username} has ${posts.size} posts" 163 | } 164 | .foreach(println) 165 | log.rounds.size shouldEqual 2 166 | } 167 | 168 | } 169 | -------------------------------------------------------------------------------- /fetch-examples/src/test/scala/JedisExample.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import scala.concurrent.ExecutionContext 18 | import scala.concurrent.duration._ 19 | 20 | import cats.Monad 21 | import cats.data.NonEmptyList 22 | import cats.effect._ 23 | import cats.instances.list._ 24 | import cats.syntax.all._ 25 | 26 | import io.circe._ 27 | import io.circe.generic.semiauto._ 28 | 29 | import org.scalatest.matchers.should.Matchers 30 | import org.scalatest.wordspec.AnyWordSpec 31 | 32 | import java.io._ 33 | import java.nio.charset.Charset 34 | import redis.clients.jedis._ 35 | import scala.util.Try 36 | 37 | import fetch._ 38 | 39 | object DataSources { 40 | object Numbers extends Data[Int, Int] { 41 | def name = "Numbers" 42 | 43 | def source[F[_]: Async]: DataSource[F, Int, Int] = 44 | new DataSource[F, Int, Int] { 45 | def data = Numbers 46 | 47 | override def CF = Async[F] 48 | 49 | override def fetch(id: Int): F[Option[Int]] = 50 | CF.pure(Option(id)) 51 | } 52 | } 53 | 54 | def fetchNumber[F[_]: Async](id: Int): Fetch[F, Int] = 55 | Fetch(id, Numbers.source) 56 | 57 | def fetch[F[_]: Async]: Fetch[F, HttpExample.User] = 58 | for { 59 | _ <- HttpExample.fetchUser(1) 60 | n <- fetchNumber(1) 61 | _ <- HttpExample.fetchUser(n) 62 | _ <- fetchNumber(n) 63 | u <- HttpExample.fetchUser(n) 64 | } yield u 65 | 66 | def fetchMulti[F[_]: Async]: Fetch[F, List[HttpExample.User]] = 67 | List(4, 5, 6).traverse(HttpExample.fetchUser[F](_)) 68 | } 69 | 70 | object Binary { 71 | type ByteArray = Array[Byte] 72 | 73 | def byteOutputStream[F[_]](implicit S: Sync[F]): Resource[F, ByteArrayOutputStream] = 74 | Resource.fromAutoCloseable(S.delay(new ByteArrayOutputStream())) 75 | 76 | def byteInputStream[F[_]]( 77 | bin: ByteArray 78 | )(implicit S: Sync[F]): Resource[F, ByteArrayInputStream] = 79 | Resource.fromAutoCloseable(S.delay(new ByteArrayInputStream(bin))) 80 | 81 | def outputStream[F[_]]( 82 | b: 
ByteArrayOutputStream 83 | )(implicit S: Sync[F]): Resource[F, ObjectOutputStream] = 84 | Resource.fromAutoCloseable(S.delay(new ObjectOutputStream(b))) 85 | 86 | def inputStream[F[_]]( 87 | b: ByteArrayInputStream 88 | )(implicit S: Sync[F]): Resource[F, ObjectInputStream] = 89 | Resource.fromAutoCloseable(S.delay(new ObjectInputStream(b))) 90 | 91 | def fromString(s: String): Array[Byte] = 92 | s.getBytes(Charset.forName("UTF-8")) 93 | 94 | def serialize[F[_], A](obj: A)(implicit 95 | S: Sync[F] 96 | ): F[ByteArray] = { 97 | byteOutputStream 98 | .mproduct(outputStream(_)) 99 | .use { case (byte, out) => 100 | S.delay { 101 | out.writeObject(obj) 102 | out.flush() 103 | byte.toByteArray 104 | } 105 | } 106 | } 107 | 108 | def deserialize[F[_], A](bin: ByteArray)(implicit 109 | S: Sync[F] 110 | ): F[Option[A]] = { 111 | byteInputStream(bin) 112 | .mproduct(inputStream(_)) 113 | .use { case (byte, in) => 114 | S.delay { 115 | val obj = in.readObject() 116 | Try(obj.asInstanceOf[A]).toOption 117 | } 118 | } 119 | } 120 | } 121 | 122 | case class RedisCache[F[_]: Sync](host: String) extends DataCache[F] { 123 | private val pool = new JedisPool(host) 124 | 125 | def connection: Resource[F, Jedis] = 126 | Resource.fromAutoCloseable(Sync[F].delay(pool.getResource)) 127 | 128 | private def get(i: Array[Byte]): F[Option[Array[Byte]]] = 129 | connection.use(c => Sync[F].delay(Option(c.get(i)))) 130 | 131 | private def set(i: Array[Byte], v: Array[Byte]): F[Unit] = 132 | connection.use(c => Sync[F].delay(c.set(i, v)).void) 133 | 134 | private def bulkSet(ivs: List[(Array[Byte], Array[Byte])]): F[Unit] = 135 | connection.use(c => 136 | Sync[F].delay { 137 | val pipe = c.pipelined 138 | ivs.foreach(i => pipe.set(i._1, i._2)) 139 | pipe.sync 140 | } 141 | ) 142 | 143 | private def cacheId[I, A](i: I, data: Data[I, A]): Array[Byte] = 144 | Binary.fromString(s"${data.identity} $i") 145 | 146 | override def lookup[I, A](i: I, data: Data[I, A]): F[Option[A]] = 147 | get(cacheId(i, 
data)) >>= { 148 | case None => Sync[F].pure(None) 149 | case Some(r) => Binary.deserialize[F, A](r) 150 | } 151 | 152 | override def insert[I, A](i: I, v: A, data: Data[I, A]): F[DataCache[F]] = 153 | for { 154 | s <- Binary.serialize(v) 155 | _ <- set(cacheId(i, data), s) 156 | } yield this 157 | 158 | override def bulkInsert[I, A](vs: List[(I, A)], data: Data[I, A])(implicit 159 | M: Monad[F] 160 | ): F[DataCache[F]] = 161 | for { 162 | bin <- vs.traverse { case (id, v) => 163 | Binary.serialize(v).tupleRight(cacheId(id, data)) 164 | } 165 | _ <- Sync[F].delay(bulkSet(bin)) 166 | } yield this 167 | 168 | } 169 | 170 | class JedisExample extends AnyWordSpec with Matchers { 171 | import DataSources._ 172 | 173 | // runtime 174 | val executionContext = ExecutionContext.Implicits.global 175 | implicit val ioRuntime: unsafe.IORuntime = unsafe.IORuntime.global 176 | 177 | "We can use a Redis cache" ignore { 178 | val cache = RedisCache[IO]("localhost") 179 | 180 | val io: IO[(Log, HttpExample.User)] = Fetch.runLog[IO](fetch, cache) 181 | 182 | val (log, result) = io.unsafeRunSync() 183 | 184 | println(result) 185 | log.rounds.size shouldEqual 2 186 | 187 | val io2: IO[(Log, HttpExample.User)] = Fetch.runLog[IO](fetch, cache) 188 | 189 | val (log2, result2) = io2.unsafeRunSync() 190 | 191 | println(result2) 192 | log2.rounds.size shouldEqual 0 193 | } 194 | 195 | "We can bulk insert in a Redis cache" ignore { 196 | val cache = RedisCache[IO]("localhost") 197 | 198 | val io: IO[(Log, List[HttpExample.User])] = Fetch.runLog[IO](fetchMulti, cache) 199 | 200 | val (log, result) = io.unsafeRunSync() 201 | 202 | println(result) 203 | log.rounds.size shouldEqual 1 204 | 205 | val io2: IO[(Log, List[HttpExample.User])] = Fetch.runLog[IO](fetchMulti, cache) 206 | 207 | val (log2, result2) = io2.unsafeRunSync() 208 | 209 | println(result2) 210 | log2.rounds.size shouldEqual 0 211 | } 212 | } 213 | -------------------------------------------------------------------------------- 
/fetch/src/main/scala/cache.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2016-2023 47 Degrees Open Source
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fetch

import cats._
import cats.effect._
import cats.data.NonEmptyList
import cats.instances.list._
import cats.syntax.all._

// Zero-allocation wrappers around the erased identity / result values kept in a cache.
final class DataSourceId(val id: Any) extends AnyVal
final class DataSourceResult(val result: Any) extends AnyVal

/**
 * A `Cache` trait so the users of the library can provide their own cache.
 */
trait DataCache[F[_]] {

  /** Looks up the cached result for identity `i` of `data`, if present. */
  def lookup[I, A](i: I, data: Data[I, A]): F[Option[A]]

  /** Stores a single result, returning the cache to keep using afterwards. */
  def insert[I, A](i: I, v: A, data: Data[I, A]): F[DataCache[F]]

  /**
   * Stores many results. The default implementation inserts the entries one at
   * a time, threading the cache returned by each `insert` into the next step.
   */
  def bulkInsert[I, A](vs: List[(I, A)], data: Data[I, A])(implicit
      M: Monad[F]
  ): F[DataCache[F]] =
    vs.foldLeftM[F, DataCache[F]](this) { (cache, entry) =>
      cache.insert(entry._1, entry._2, data)
    }
}

/**
 * A cache that stores its elements in memory.
47 | */ 48 | case class InMemoryCache[F[_]: Monad](state: Map[(Data[Any, Any], DataSourceId), DataSourceResult]) 49 | extends DataCache[F] { 50 | def lookup[I, A](i: I, data: Data[I, A]): F[Option[A]] = 51 | Applicative[F].pure( 52 | state 53 | .get((data.asInstanceOf[Data[Any, Any]], new DataSourceId(i))) 54 | .map(_.result.asInstanceOf[A]) 55 | ) 56 | 57 | def insert[I, A](i: I, v: A, data: Data[I, A]): F[DataCache[F]] = 58 | Applicative[F].pure( 59 | copy(state = 60 | state.updated( 61 | (data.asInstanceOf[Data[Any, Any]], new DataSourceId(i)), 62 | new DataSourceResult(v) 63 | ) 64 | ) 65 | ) 66 | } 67 | 68 | object InMemoryCache { 69 | def empty[F[_]: Monad]: InMemoryCache[F] = 70 | InMemoryCache[F](Map.empty[(Data[Any, Any], DataSourceId), DataSourceResult]) 71 | 72 | def from[F[_]: Monad, I, A](results: ((Data[I, A], I), A)*): InMemoryCache[F] = 73 | InMemoryCache[F](results.foldLeft(Map.empty[(Data[Any, Any], DataSourceId), DataSourceResult]) { 74 | case (acc, ((data, i), v)) => 75 | acc.updated( 76 | (data.asInstanceOf[Data[Any, Any]], new DataSourceId(i)), 77 | new DataSourceResult(v) 78 | ) 79 | }) 80 | } 81 | -------------------------------------------------------------------------------- /fetch/src/main/scala/datasource.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package fetch 18 | 19 | import cats.data.NonEmptyList 20 | import cats.effect._ 21 | import cats.effect.implicits._ 22 | import cats.effect.std.{Queue, Supervisor} 23 | import cats.kernel.{Hash => H} 24 | import cats.syntax.all._ 25 | 26 | import scala.collection.mutable 27 | import scala.concurrent.duration.FiniteDuration 28 | 29 | /** 30 | * `Data` is a trait used to identify and optimize access to a `DataSource`. 31 | */ 32 | trait Data[I, A] { self => 33 | def name: String 34 | 35 | def identity: Data.Identity = 36 | H.fromUniversalHashCode.hash(self) 37 | } 38 | 39 | object Data { 40 | type Identity = Int 41 | } 42 | 43 | /** 44 | * A `DataSource` is the recipe for fetching a certain identity `I`, which yields results of type 45 | * `A` performing an effect of type `F[_]`. 46 | */ 47 | trait DataSource[F[_], I, A] { 48 | def data: Data[I, A] 49 | 50 | implicit def CF: Concurrent[F] 51 | 52 | /** 53 | * Fetch one identity, returning a None if it wasn't found. 54 | */ 55 | def fetch(id: I): F[Option[A]] 56 | 57 | /** 58 | * Fetch many identities, returning a mapping from identities to results. If an identity wasn't 59 | * found, it won't appear in the keys. 
60 | */ 61 | def batch(ids: NonEmptyList[I]): F[Map[I, A]] = 62 | FetchExecution 63 | .parallel( 64 | ids.map(id => fetch(id).tupleLeft(id)) 65 | ) 66 | .map(_.collect { case (id, Some(x)) => id -> x }.toMap) 67 | 68 | def maxBatchSize: Option[Int] = None 69 | 70 | def batchExecution: BatchExecution = InParallel 71 | } 72 | 73 | object DataSource { 74 | private def upToWithin[F[_], T](queue: Queue[F, T], maxElements: Int, interval: FiniteDuration)( 75 | implicit F: Temporal[F] 76 | ): F[List[T]] = { 77 | Ref[F].of(List.empty[T]).flatMap { ref => 78 | val takeAndBuffer = F.uncancelable { poll => 79 | poll(queue.take).flatMap { x => 80 | ref.updateAndGet(list => x :: list) 81 | } 82 | } 83 | val bufferUntilNumElements = takeAndBuffer.iterateUntil { buffer => 84 | buffer.size == maxElements 85 | } 86 | F.timeoutTo(bufferUntilNumElements, interval, ref.get) 87 | } 88 | } 89 | 90 | /** 91 | * Returns a new DataSource that will batch Fetch requests across executions within a given 92 | * interval. 93 | * 94 | * As an example, if we have a Fetch request A, and a fetch request B that are being executed 95 | * simultaneously without knowledge of the other within some milliseconds of the other, the 96 | * datasource will transparently batch the two requests in a single batch call execution. 97 | * 98 | * This is useful if you want to treat each fetch individually from the others, for example in an 99 | * HTTP server processing requests. 
100 | * 101 | * The original DataSource limits will be respected 102 | * 103 | * @param dataSource 104 | * the original datasource to be wrapped 105 | * @param delayPerBatch 106 | * the interval for processing Fetch requests as a single Batch call 107 | * @return 108 | */ 109 | def batchAcrossFetches[F[_], I, A]( 110 | dataSource: DataSource[F, I, A], 111 | delayPerBatch: FiniteDuration 112 | )(implicit 113 | F: Async[F] 114 | ): Resource[F, DataSource[F, I, A]] = { 115 | type Callback = Either[Throwable, Option[A]] => Unit 116 | for { 117 | queue <- Resource.eval(Queue.unbounded[F, (I, Callback)]) 118 | supervisor <- Supervisor[F] 119 | workerFiber = upToWithin( 120 | queue, 121 | dataSource.maxBatchSize.getOrElse(Int.MaxValue), 122 | delayPerBatch 123 | ).flatMap { x => 124 | if (x.isEmpty) { 125 | supervisor.supervise(F.unit) 126 | } else { 127 | val asMap = x.groupBy(_._1).mapValues(callbacks => callbacks.map(_._2)) 128 | val batchResults = dataSource.batch(NonEmptyList.fromListUnsafe(asMap.keys.toList)) 129 | val resultsHaveBeenSent = batchResults.map { results => 130 | asMap.foreach { case (identity, callbacks) => 131 | callbacks.foreach(cb => cb(Right(results.get(identity)))) 132 | } 133 | } 134 | val fiberWork = F.handleError(resultsHaveBeenSent) { ex => 135 | asMap.foreach { case (_, callbacks) => 136 | callbacks.foreach(cb => cb(Left(ex))) 137 | } 138 | } 139 | supervisor.supervise(fiberWork) 140 | } 141 | }.foreverM[Unit] 142 | _ <- F.background(workerFiber) 143 | } yield { 144 | new DataSource[F, I, A] { 145 | override def data: Data[I, A] = dataSource.data 146 | 147 | override implicit def CF: Concurrent[F] = dataSource.CF 148 | 149 | override def fetch(id: I): F[Option[A]] = { 150 | F.async { cb => 151 | queue.offer((id, cb)) *> F.pure(None) 152 | } 153 | } 154 | } 155 | } 156 | } 157 | } 158 | 159 | sealed trait BatchExecution extends Product with Serializable 160 | case object Sequentially extends BatchExecution 161 | case object InParallel extends 
BatchExecution
--------------------------------------------------------------------------------
/fetch/src/main/scala/execution.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2016-2023 47 Degrees Open Source
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fetch

import cats.Parallel
import cats.data.NonEmptyList
import cats.syntax.all._

/**
 * Internal helper for running a batch of effects.
 */
private object FetchExecution {

  /** Runs every effect in parallel and collects the results, preserving order. */
  def parallel[F[_]: Parallel, A](effects: NonEmptyList[F[A]]): F[NonEmptyList[A]] =
    Parallel.parSequence(effects)
}
--------------------------------------------------------------------------------
/fetch/src/main/scala/log.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2016-2023 47 Degrees Open Source
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package fetch 18 | 19 | import scala.collection.immutable._ 20 | 21 | /** 22 | * A log that is passed along during the fetch rounds to record a fetch execution. It holds the a 23 | * list of rounds that have been executed. 24 | */ 25 | trait Log { 26 | def rounds: List[Round] 27 | def append(round: Round): Log 28 | } 29 | 30 | /** 31 | * A data structure that holds information about a request inside a fetch round. 32 | */ 33 | case class Request( 34 | request: FetchRequest, 35 | start: Long, 36 | end: Long 37 | ) { 38 | def duration: Long = end - start 39 | } 40 | 41 | /** 42 | * A data structure that holds information about a fetch round. 43 | */ 44 | case class Round( 45 | queries: List[Request] 46 | ) 47 | 48 | /** 49 | * A concrete implementation of `Log` used in Fetch. 50 | */ 51 | case class FetchLog( 52 | q: Queue[Round] = Queue.empty 53 | ) extends Log { 54 | def rounds = q.toList 55 | def append(round: Round): Log = 56 | copy(q = q :+ round) 57 | } 58 | -------------------------------------------------------------------------------- /fetch/src/main/scala/syntax.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package fetch 18 | 19 | import cats._ 20 | import cats.effect._ 21 | import fetch.Fetch 22 | 23 | object syntax { 24 | 25 | /** 26 | * Implicit syntax to lift any value to the context of Fetch via pure 27 | */ 28 | implicit class FetchIdSyntax[A](val a: A) extends AnyVal { 29 | 30 | def fetch[F[_]: Concurrent]: Fetch[F, A] = 31 | Fetch.pure[F, A](a) 32 | } 33 | 34 | /** 35 | * Implicit syntax to lift exception to Fetch errors 36 | */ 37 | implicit class FetchExceptionSyntax[B](val a: Throwable) extends AnyVal { 38 | 39 | def fetch[F[_]: Concurrent]: Fetch[F, B] = 40 | Fetch.error[F, B](a) 41 | } 42 | 43 | implicit class FetchSeqBatchSyntax[F[_]: Monad, A](fetches: Seq[Fetch[F, A]]) { 44 | 45 | def batchAll: Fetch[F, List[A]] = Fetch.batchAll(fetches: _*) 46 | } 47 | 48 | implicit class SeqSyntax[A](val as: Seq[A]) extends AnyVal { 49 | 50 | def batchAllWith[F[_]: Monad, B](f: A => Fetch[F, B]) = Fetch.batchAll(as.map(f): _*) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /fetch/src/test/scala/FetchAsyncQueryTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | import cats.instances.list._ 18 | import cats.effect._ 19 | import cats.syntax.all._ 20 | 21 | import fetch._ 22 | 23 | class FetchAsyncQueryTests extends FetchSpec { 24 | import DataSources._ 25 | 26 | "We can interpret an async fetch into an IO" in { 27 | def fetch[F[_]: Async]: Fetch[F, Article] = 28 | article(1) 29 | 30 | val io = Fetch.run[IO](fetch) 31 | 32 | io.map(_ shouldEqual Article(1, "An article with id 1")).unsafeToFuture() 33 | } 34 | 35 | "We can combine several async data sources and interpret a fetch into an IO" in { 36 | def fetch[F[_]: Async]: Fetch[F, (Article, Author)] = 37 | for { 38 | art <- article(1) 39 | author <- author(art) 40 | } yield (art, author) 41 | 42 | val io = Fetch.run[IO](fetch) 43 | 44 | io.map(_ shouldEqual (Article(1, "An article with id 1"), Author(2, "@egg2"))).unsafeToFuture() 45 | } 46 | 47 | "We can use combinators in a for comprehension and interpret a fetch from async sources into an IO" in { 48 | def fetch[F[_]: Async]: Fetch[F, List[Article]] = 49 | for { 50 | articles <- List(1, 1, 2).traverse(article[F]) 51 | } yield articles 52 | 53 | val io = Fetch.run[IO](fetch) 54 | 55 | io.map( 56 | _ shouldEqual List( 57 | Article(1, "An article with id 1"), 58 | Article(1, "An article with id 1"), 59 | Article(2, "An article with id 2") 60 | ) 61 | ).unsafeToFuture() 62 | } 63 | 64 | "We can use combinators and multiple sources in a for comprehension and interpret a fetch from async sources into an IO" in { 65 | def fetch[F[_]: Async] = 66 | for { 67 | articles <- List(1, 1, 2).traverse(article[F]) 68 | authors <- articles.traverse(author[F]) 69 | } yield (articles, authors) 70 | 71 | val io = Fetch.run[IO](fetch) 72 | 73 | io.map( 74 | _ shouldEqual ( 75 | List( 76 | Article(1, "An article with id 1"), 77 | Article(1, "An article with id 1"), 78 | Article(2, "An article with id 2") 79 | ), 80 | List( 81 | Author(2, "@egg2"), 82 | Author(2, "@egg2"), 83 | Author(3, "@egg3") 84 | ) 85 | ) 86 | 
).unsafeToFuture() 87 | } 88 | } 89 | 90 | object DataSources { 91 | case class ArticleId(id: Int) 92 | case class Article(id: Int, content: String) { 93 | def author: Int = id + 1 94 | } 95 | 96 | object Article extends Data[ArticleId, Article] { 97 | def name = "Articles" 98 | 99 | implicit def async[F[_]](implicit 100 | AF: Async[F] 101 | ): DataSource[F, ArticleId, Article] = 102 | new DataSource[F, ArticleId, Article] { 103 | override def CF = Concurrent[F] 104 | 105 | override def data = Article 106 | 107 | override def fetch(id: ArticleId): F[Option[Article]] = 108 | AF.async_[Option[Article]] { (cb) => 109 | cb(Right(Option(Article(id.id, "An article with id " + id.id)))) 110 | } 111 | } 112 | } 113 | 114 | def article[F[_]: Async](id: Int): Fetch[F, Article] = 115 | Fetch(ArticleId(id), Article.async[F]) 116 | 117 | case class AuthorId(id: Int) 118 | case class Author(id: Int, name: String) 119 | 120 | object Author extends Data[AuthorId, Author] { 121 | def name = "Authors" 122 | 123 | implicit def async[F[_]](implicit 124 | AF: Async[F] 125 | ): DataSource[F, AuthorId, Author] = 126 | new DataSource[F, AuthorId, Author] { 127 | override def CF = Concurrent[F] 128 | 129 | override def data = Author 130 | 131 | override def fetch(id: AuthorId): F[Option[Author]] = 132 | AF.async_ { cb => 133 | cb(Right(Option(Author(id.id, "@egg" + id.id)))) 134 | } 135 | } 136 | } 137 | 138 | def author[F[_]: Async](a: Article): Fetch[F, Author] = 139 | Fetch(AuthorId(a.author), Author.async) 140 | } 141 | -------------------------------------------------------------------------------- /fetch/src/test/scala/FetchBatchingTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package fetch 18 | 19 | import cats._ 20 | import cats.data.NonEmptyList 21 | import cats.instances.list._ 22 | import cats.syntax.all._ 23 | import cats.effect._ 24 | import fetch._ 25 | import fetch.syntax._ 26 | 27 | import java.util.concurrent.atomic.AtomicInteger 28 | import scala.concurrent.duration.{DurationInt, FiniteDuration} 29 | 30 | class FetchBatchingTests extends FetchSpec { 31 | import TestHelper._ 32 | 33 | case class BatchedDataSeq(id: Int) 34 | 35 | object SeqBatch extends Data[BatchedDataSeq, Int] { 36 | def name = "Sequential batching" 37 | 38 | implicit def source[F[_]: Concurrent]: DataSource[F, BatchedDataSeq, Int] = 39 | new DataSource[F, BatchedDataSeq, Int] { 40 | override def data = SeqBatch 41 | 42 | override def CF = Concurrent[F] 43 | 44 | override def fetch(id: BatchedDataSeq): F[Option[Int]] = 45 | CF.pure(Some(id.id)) 46 | 47 | override val maxBatchSize = Some(2) 48 | 49 | override val batchExecution = Sequentially 50 | } 51 | } 52 | 53 | case class BatchedDataPar(id: Int) 54 | 55 | object ParBatch extends Data[BatchedDataPar, Int] { 56 | def name = "Parallel batching" 57 | 58 | implicit def source[F[_]: Concurrent]: DataSource[F, BatchedDataPar, Int] = 59 | new DataSource[F, BatchedDataPar, Int] { 60 | override def data = ParBatch 61 | 62 | override def CF = Concurrent[F] 63 | 64 | override def fetch(id: BatchedDataPar): F[Option[Int]] = 65 | CF.pure(Some(id.id)) 66 | 67 | override val maxBatchSize = Some(2) 68 | 69 | override val batchExecution = InParallel 70 | } 71 | } 72 
| 73 | case class BatchedDataBigId( 74 | str1: String, 75 | str2: String, 76 | str3: String 77 | ) 78 | 79 | object BigIdData extends Data[BatchedDataBigId, String] { 80 | def name = "Big id batching" 81 | 82 | implicit def source[F[_]: Concurrent]: DataSource[F, BatchedDataBigId, String] = 83 | new DataSource[F, BatchedDataBigId, String] { 84 | override def data = BigIdData 85 | 86 | override def CF = Concurrent[F] 87 | 88 | override def fetch(request: BatchedDataBigId): F[Option[String]] = 89 | batch(NonEmptyList.one(request)).map(_.get(request)) 90 | 91 | override def batch(ids: NonEmptyList[BatchedDataBigId]): F[Map[BatchedDataBigId, String]] = 92 | CF.pure( 93 | ids.map(id => id -> id.toString).toList.toMap 94 | ) 95 | 96 | override val batchExecution = InParallel 97 | } 98 | } 99 | 100 | case class BatchAcrossFetchData(id: Int) 101 | 102 | object BatchAcrossFetches extends Data[BatchAcrossFetchData, String] { 103 | def name = "Batch across Fetches" 104 | 105 | private val batchesCounter = new AtomicInteger(0) 106 | private val fetchesCounter = new AtomicInteger(0) 107 | 108 | def reset(): Unit = { 109 | batchesCounter.set(0) 110 | fetchesCounter.set(0) 111 | } 112 | 113 | def counters: (Int, Int) = 114 | (fetchesCounter.get(), batchesCounter.get()) 115 | 116 | def unBatchedSource[F[_]: Concurrent]: DataSource[F, BatchAcrossFetchData, String] = 117 | new DataSource[F, BatchAcrossFetchData, String] { 118 | override def data = BatchAcrossFetches 119 | 120 | override def CF = Concurrent[F] 121 | 122 | override def fetch(request: BatchAcrossFetchData): F[Option[String]] = { 123 | fetchesCounter.incrementAndGet() 124 | CF.pure(Some(request.toString)) 125 | } 126 | 127 | override def batch( 128 | ids: NonEmptyList[BatchAcrossFetchData] 129 | ): F[Map[BatchAcrossFetchData, String]] = { 130 | batchesCounter.incrementAndGet() 131 | CF.pure( 132 | ids.map(id => id -> id.toString).toList.toMap 133 | ) 134 | } 135 | 136 | override val batchExecution = InParallel 137 | } 
138 | 139 | def batchedSource[F[_]: Async]( 140 | interval: FiniteDuration 141 | ): Resource[F, DataSource[F, BatchAcrossFetchData, String]] = 142 | DataSource.batchAcrossFetches(unBatchedSource, interval) 143 | } 144 | 145 | def fetchBatchedDataSeq[F[_]: Concurrent](id: Int): Fetch[F, Int] = 146 | Fetch(BatchedDataSeq(id), SeqBatch.source) 147 | 148 | def fetchBatchedDataPar[F[_]: Concurrent](id: Int): Fetch[F, Int] = 149 | Fetch(BatchedDataPar(id), ParBatch.source) 150 | 151 | def fetchBatchedDataBigId[F[_]: Concurrent](id: BatchedDataBigId): Fetch[F, String] = 152 | Fetch(id, BigIdData.source) 153 | 154 | "A large fetch to a datasource with a maximum batch size is split and executed in sequence" in { 155 | def fetch[F[_]: Concurrent]: Fetch[F, List[Int]] = 156 | List.range(1, 6).map(fetchBatchedDataSeq[F]).batchAll 157 | 158 | val io = Fetch.runLog[IO](fetch) 159 | 160 | io.map { case (log, result) => 161 | result shouldEqual List(1, 2, 3, 4, 5) 162 | log.rounds.size shouldEqual 1 163 | totalFetched(log.rounds) shouldEqual 5 164 | totalBatches(log.rounds) shouldEqual 3 165 | }.unsafeToFuture() 166 | } 167 | 168 | "A large fetch to a datasource with a maximum batch size is split and executed in parallel" in { 169 | def fetch[F[_]: Concurrent]: Fetch[F, List[Int]] = 170 | List.range(1, 6).map(fetchBatchedDataPar[F]).batchAll 171 | 172 | val io = Fetch.runLog[IO](fetch) 173 | 174 | io.map { case (log, result) => 175 | result shouldEqual List(1, 2, 3, 4, 5) 176 | log.rounds.size shouldEqual 1 177 | totalFetched(log.rounds) shouldEqual 5 178 | totalBatches(log.rounds) shouldEqual 3 179 | }.unsafeToFuture() 180 | } 181 | 182 | "Fetches to datasources with a maximum batch size should be split and executed in parallel and sequentially when using productR" in { 183 | def fetch[F[_]: Concurrent]: Fetch[F, List[Int]] = 184 | List.range(1, 6).map(fetchBatchedDataPar[F]).batchAll *> 185 | List.range(1, 6).map(fetchBatchedDataSeq[F]).batchAll 186 | 187 | val io = 
Fetch.runLog[IO](fetch) 188 | 189 | io.map { case (log, result) => 190 | result shouldEqual List(1, 2, 3, 4, 5) 191 | log.rounds.size shouldEqual 1 192 | totalFetched(log.rounds) shouldEqual 5 + 5 193 | totalBatches(log.rounds) shouldEqual 3 + 3 194 | }.unsafeToFuture() 195 | } 196 | 197 | "Fetches to datasources with a maximum batch size should be split and executed in parallel and sequentially when using productL" in { 198 | def fetch[F[_]: Concurrent]: Fetch[F, List[Int]] = 199 | List.range(1, 6).map(fetchBatchedDataPar[F]).batchAll <* 200 | List.range(1, 6).map(fetchBatchedDataSeq[F]).batchAll 201 | 202 | val io = Fetch.runLog[IO](fetch) 203 | 204 | io.map { case (log, result) => 205 | result shouldEqual List(1, 2, 3, 4, 5) 206 | log.rounds.size shouldEqual 1 207 | totalFetched(log.rounds) shouldEqual 5 + 5 208 | totalBatches(log.rounds) shouldEqual 3 + 3 209 | }.unsafeToFuture() 210 | } 211 | 212 | "A large (many) fetch to a datasource with a maximum batch size is split and executed in sequence" in { 213 | def fetch[F[_]: Concurrent]: Fetch[F, List[Int]] = 214 | List(1, 2, 3).map(fetchBatchedDataSeq[F]).batchAll 215 | 216 | val io = Fetch.runLog[IO](fetch) 217 | 218 | io.map { case (log, result) => 219 | result shouldEqual List(1, 2, 3) 220 | log.rounds.size shouldEqual 1 221 | totalFetched(log.rounds) shouldEqual 3 222 | totalBatches(log.rounds) shouldEqual 2 223 | }.unsafeToFuture() 224 | } 225 | 226 | "A large (many) fetch to a datasource with a maximum batch size is split and executed in parallel" in { 227 | def fetch[F[_]: Concurrent]: Fetch[F, List[Int]] = 228 | List(1, 2, 3).map(fetchBatchedDataPar[F]).batchAll 229 | 230 | val io = Fetch.runLog[IO](fetch) 231 | 232 | io.map { case (log, result) => 233 | result shouldEqual List(1, 2, 3) 234 | log.rounds.size shouldEqual 1 235 | totalFetched(log.rounds) shouldEqual 3 236 | totalBatches(log.rounds) shouldEqual 2 237 | }.unsafeToFuture() 238 | } 239 | 240 | "Very deep fetches don't overflow stack or heap" 
in { 241 | val depth = 5000 242 | val ids = for { 243 | id <- 0 to depth 244 | } yield BatchedDataBigId( 245 | str1 = "longString" + id, 246 | str2 = "longString" + (id + 1), 247 | str3 = "longString" + (id + 2) 248 | ) 249 | 250 | val io = Fetch.runLog[IO]( 251 | ids.toList.map(fetchBatchedDataBigId[IO]).batchAll 252 | ) 253 | 254 | io.map { case (log, result) => 255 | result shouldEqual ids.map(_.toString) 256 | }.unsafeToFuture() 257 | } 258 | 259 | "Fetches produced across unrelated fetches to a DataSource that is NOT batched across fetch executions should NOT be bundled together" in { 260 | BatchAcrossFetches.reset() 261 | val dataSource = BatchAcrossFetches.unBatchedSource[IO] 262 | val id1 = BatchAcrossFetchData(1) 263 | val id2 = BatchAcrossFetchData(2) 264 | val execution1 = Fetch.run[IO](Fetch(id1, dataSource)) 265 | val execution2 = Fetch.run[IO](Fetch(id2, dataSource)) 266 | val singleExecution = (execution1, execution2).parMapN { (_, _) => 267 | val (fetchRequests, batchRequests) = BatchAcrossFetches.counters 268 | fetchRequests shouldEqual 2 269 | batchRequests shouldEqual 0 270 | } 271 | singleExecution.unsafeToFuture() 272 | } 273 | 274 | "Fetches produced across unrelated fetches to a DataSource that is batched across fetch executions should be bundled together" in { 275 | BatchAcrossFetches.reset() 276 | val dataSource = BatchAcrossFetches.batchedSource[IO](500.millis) 277 | val id1 = BatchAcrossFetchData(1) 278 | val id2 = BatchAcrossFetchData(2) 279 | dataSource 280 | .use { dataSource => 281 | val execution1 = Fetch.run[IO](Fetch(id1, dataSource)) 282 | val execution2 = Fetch.run[IO](Fetch(id2, dataSource)) 283 | val singleExecution = (execution1, execution2).parMapN { (_, _) => 284 | val (fetchRequests, batchRequests) = BatchAcrossFetches.counters 285 | fetchRequests shouldEqual 0 286 | batchRequests shouldEqual 1 287 | } 288 | singleExecution 289 | } 290 | .unsafeToFuture() 291 | } 292 | } 293 | 
-------------------------------------------------------------------------------- /fetch/src/test/scala/FetchReportingTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package fetch 18 | 19 | import cats.effect._ 20 | import cats.instances.list._ 21 | import cats.syntax.all._ 22 | 23 | class FetchReportingTests extends FetchSpec { 24 | import TestHelper._ 25 | 26 | "Plain values have no rounds of execution" in { 27 | def fetch[F[_]: Concurrent] = 28 | Fetch.pure[F, Int](42) 29 | 30 | val io = Fetch.runLog[IO](fetch) 31 | 32 | io.map { case (log, result) => 33 | log.rounds.size shouldEqual 0 34 | }.unsafeToFuture() 35 | } 36 | 37 | "Single fetches are executed in one round" in { 38 | def fetch[F[_]: Concurrent] = 39 | one(1) 40 | 41 | val io = Fetch.runLog[IO](fetch[IO]) 42 | 43 | io.map { case (log, result) => 44 | log.rounds.size shouldEqual 1 45 | }.unsafeToFuture() 46 | } 47 | 48 | "Single fetches are executed in one round per binding in a for comprehension" in { 49 | def fetch[F[_]: Concurrent] = 50 | for { 51 | o <- one(1) 52 | t <- one(2) 53 | } yield (o, t) 54 | 55 | val io = Fetch.runLog[IO](fetch) 56 | 57 | io.map { case (log, result) => 58 | log.rounds.size shouldEqual 2 59 | }.unsafeToFuture() 60 | } 61 | 62 | "Single fetches for different data sources are 
executed in multiple rounds if they are in a for comprehension" in { 63 | def fetch[F[_]: Concurrent] = 64 | for { 65 | o <- one(1) 66 | m <- many(3) 67 | } yield (o, m) 68 | 69 | val io = Fetch.runLog[IO](fetch) 70 | 71 | io.map { case (log, result) => 72 | log.rounds.size shouldEqual 2 73 | }.unsafeToFuture() 74 | } 75 | 76 | "Single fetches combined with cartesian are run in one round" in { 77 | def fetch[F[_]: Concurrent] = 78 | (one(1), many(3)).tupled 79 | 80 | val io = Fetch.runLog[IO](fetch) 81 | 82 | io.map { case (log, result) => 83 | log.rounds.size shouldEqual 1 84 | }.unsafeToFuture() 85 | } 86 | 87 | "Single fetches combined with traverse are run in one round" in { 88 | def fetch[F[_]: Concurrent] = 89 | for { 90 | manies <- many(3) // round 1 91 | ones <- manies.traverse(one[F]) // rounds 2, 3, 4 92 | } yield ones 93 | 94 | val io = Fetch.runLog[IO](fetch) 95 | 96 | io.map { case (log, result) => 97 | log.rounds.size shouldEqual 2 98 | }.unsafeToFuture() 99 | } 100 | 101 | "Single fetches combined with Fetch.batchAll are run in one round" in { 102 | def fetch[F[_]: Concurrent] = 103 | for { 104 | manies <- many(3) // round 1 105 | ones <- Fetch.batchAll(manies.map(one[F]): _*) // round 2 106 | } yield ones 107 | 108 | val io = Fetch.runLog[IO](fetch) 109 | 110 | io.map { case (log, result) => 111 | log.rounds.size shouldEqual 2 112 | }.unsafeToFuture() 113 | } 114 | 115 | "The product of two fetches from the same data source implies batching" in { 116 | def fetch[F[_]: Concurrent] = 117 | (one(1), one(3)).tupled 118 | 119 | val io = Fetch.runLog[IO](fetch) 120 | 121 | io.map { case (log, result) => 122 | log.rounds.size shouldEqual 1 123 | }.unsafeToFuture() 124 | } 125 | 126 | "The product of concurrent fetches of the same type implies everything fetched in batches" in { 127 | def aFetch[F[_]: Concurrent] = 128 | for { 129 | a <- one(1) // round 1 (batched) 130 | b <- one(2) // round 2 (cached) 131 | c <- one(3) // round 3 (deduplicated) 132 | } 
yield c 133 | 134 | def anotherFetch[F[_]: Concurrent] = 135 | for { 136 | a <- one(2) // round 1 (batched) 137 | m <- many(4) // round 2 138 | c <- one(3) // round 3 (deduplicated) 139 | } yield c 140 | 141 | def fetch[F[_]: Concurrent] = 142 | ((aFetch, anotherFetch).tupled, one(3)).tupled 143 | 144 | val io = Fetch.runLog[IO](fetch) 145 | 146 | io.map { case (log, result) => 147 | log.rounds.size shouldEqual 3 148 | totalBatches(log.rounds) shouldEqual 1 149 | totalFetched(log.rounds) shouldEqual 3 + 1 150 | }.unsafeToFuture() 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /fetch/src/test/scala/FetchSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package fetch 18 | 19 | import cats.effect._ 20 | import org.scalatest.DoNotDiscover 21 | import org.scalatest.freespec.AsyncFreeSpec 22 | import org.scalatest.matchers.should.Matchers 23 | 24 | import scala.concurrent._ 25 | 26 | @DoNotDiscover 27 | class FetchSpec extends AsyncFreeSpec with Matchers { 28 | override val executionContext: ExecutionContext = ExecutionContext.Implicits.global 29 | implicit val ioRuntime: unsafe.IORuntime = unsafe.IORuntime.global 30 | 31 | def countFetches(r: Request): Int = 32 | r.request match { 33 | case FetchOne(_, _, false) => 1 34 | case Batch(ids, _, false) => ids.toList.size 35 | case FetchOne(_, _, true) => 0 36 | case Batch(_, _, true) => 0 37 | } 38 | 39 | def totalFetched(rs: Seq[Round]): Int = 40 | rs.map((round: Round) => round.queries.map(countFetches).sum).toList.sum 41 | 42 | def countBatches(r: Request): Int = 43 | r.request match { 44 | case FetchOne(_, _, false) => 0 45 | case Batch(_, _, false) => 1 46 | case FetchOne(_, _, true) => 0 47 | case Batch(_, _, true) => 0 48 | } 49 | 50 | def totalBatches(rs: Seq[Round]): Int = 51 | rs.map((round: Round) => round.queries.map(countBatches).sum).toList.sum 52 | } 53 | -------------------------------------------------------------------------------- /fetch/src/test/scala/FetchSyntaxTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package fetch 18 | 19 | import cats.syntax.all._ 20 | import cats.effect._ 21 | 22 | import fetch.syntax._ 23 | import fetch.Fetch 24 | 25 | class FetchSyntaxTests extends FetchSpec { 26 | import TestHelper._ 27 | 28 | "`fetch` syntax allows lifting of any value to the context of a fetch" in { 29 | Fetch.pure[IO, Int](42) shouldEqual 42.fetch[IO] 30 | } 31 | 32 | "`fetch` syntax allows lifting of any `Throwable` as a failure on a fetch" in { 33 | case object Ex extends RuntimeException 34 | 35 | def f1[F[_]: Concurrent] = 36 | Fetch.error[F, Int](Ex) 37 | 38 | def f2[F[_]: Concurrent] = 39 | Ex.fetch[F] 40 | 41 | val io1 = Fetch.run[IO](f1) 42 | val io2 = Fetch.run[IO](f2) 43 | 44 | val e1 = io1.handleError(err => 42) 45 | val e2 = io2.handleError(err => 42) 46 | 47 | (e1, e2).mapN(_ shouldEqual _).unsafeToFuture() 48 | } 49 | 50 | "`batchAll` syntax allows batching sequences of fetches and is equivalent to Fetch.batchAll" in { 51 | def fetches[F[_]: Concurrent] = List(1, 2, 3).map(one[IO]) 52 | val fetchWithSyntax = fetches[IO].batchAll 53 | val fetchWithOtherSyntax = List(1, 2, 3).batchAllWith(one[IO]) 54 | val fetchManual = Fetch.batchAll(fetches[IO]: _*) 55 | 56 | val result1 = Fetch.runLog[IO](fetchWithSyntax) 57 | val result2 = Fetch.runLog[IO](fetchWithOtherSyntax) 58 | val result3 = Fetch.runLog[IO](fetchManual) 59 | 60 | (result1, result2, result3).tupled 61 | .map { case ((log1, r1), (log2, r2), (log3, r3)) => 62 | Set(r1, r2, r3).size shouldBe 1 63 | 64 | log1.rounds.size shouldBe 1 65 | log2.rounds.size shouldBe 1 66 | log3.rounds.size shouldBe 1 67 | } 68 | .unsafeToFuture() 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /fetch/src/test/scala/TestHelper.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016-2023 
47 Degrees Open Source 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package fetch 18 | 19 | import cats._ 20 | import cats.effect._ 21 | import cats.data.NonEmptyList 22 | 23 | import scala.collection.immutable.Map 24 | 25 | object TestHelper { 26 | case class AnException() extends Throwable 27 | 28 | object One extends Data[Int, Int] { 29 | def name = "One" 30 | 31 | def source[F[_]: Concurrent]: DataSource[F, Int, Int] = 32 | new DataSource[F, Int, Int] { 33 | override def data = One 34 | 35 | override def CF = Concurrent[F] 36 | 37 | override def fetch(id: Int): F[Option[Int]] = 38 | CF.pure(Option(id)) 39 | 40 | override def batch(ids: NonEmptyList[Int]): F[Map[Int, Int]] = 41 | CF.pure( 42 | ids.toList.map((v) => (v, v)).toMap 43 | ) 44 | } 45 | } 46 | 47 | def one[F[_]: Concurrent](id: Int): Fetch[F, Int] = 48 | Fetch(id, One.source) 49 | 50 | object Many extends Data[Int, List[Int]] { 51 | def name = "Many" 52 | 53 | def source[F[_]: Concurrent]: DataSource[F, Int, List[Int]] = 54 | new DataSource[F, Int, List[Int]] { 55 | override def data = Many 56 | 57 | override def CF = Concurrent[F] 58 | 59 | override def fetch(id: Int): F[Option[List[Int]]] = 60 | CF.pure(Option(0 until id toList)) 61 | } 62 | } 63 | 64 | def many[F[_]: Concurrent](id: Int): Fetch[F, List[Int]] = 65 | Fetch(id, Many.source) 66 | 67 | object AnotherOne extends Data[Int, Int] { 68 | def name = "Another one" 69 | 70 | def 
source[F[_]: Concurrent]: DataSource[F, Int, Int] = 71 | new DataSource[F, Int, Int] { 72 | override def data = AnotherOne 73 | 74 | override def CF = Concurrent[F] 75 | 76 | override def fetch(id: Int): F[Option[Int]] = 77 | CF.pure(Option(id)) 78 | 79 | override def batch(ids: NonEmptyList[Int]): F[Map[Int, Int]] = 80 | CF.pure( 81 | ids.toList.map((v) => (v, v)).toMap 82 | ) 83 | } 84 | } 85 | 86 | def anotherOne[F[_]: Concurrent](id: Int): Fetch[F, Int] = 87 | Fetch(id, AnotherOne.source) 88 | 89 | case class Never() 90 | 91 | object Never extends Data[Never, Int] { 92 | def name = "Never" 93 | 94 | def source[F[_]: Concurrent]: DataSource[F, Never, Int] = 95 | new DataSource[F, Never, Int] { 96 | override def data = Never 97 | 98 | override def CF = Concurrent[F] 99 | 100 | override def fetch(id: Never): F[Option[Int]] = 101 | CF.pure(None: Option[Int]) 102 | } 103 | } 104 | 105 | def never[F[_]: Concurrent]: Fetch[F, Int] = 106 | Fetch(Never(), Never.source) 107 | 108 | } 109 | -------------------------------------------------------------------------------- /microsite/docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: fetch-home 3 | --- 4 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_data/commons.yml: -------------------------------------------------------------------------------- 1 | repoUrl: "https://github.com/47degrees/fetch" 2 | organizationUrl: "https://www.47deg.com" 3 | organization: "Xebia Functional" 4 | description: "Simple & Efficient data fetching" 5 | keywords: "functional-programming, kotlin, kotlin-library, scala, scala-js, cats, monads, monix, data, data-fetching, parallelism, concurrency, sequencing, for-comprehension, category-theory" 6 | owner: "Xebia Functional" 7 | repo: "fetch" 8 | -------------------------------------------------------------------------------- 
/microsite/src/main/resources/microsite/_data/features.yml: -------------------------------------------------------------------------------- 1 | content: 2 | - title: Define 3 | description: Tell Fetch how your data is fetched and define your data-fetching functions using functional combinators. 4 | icon: img/icon-feature-first.svg 5 | 6 | - title: Run 7 | description: Fetch will run your data-fetching code and apply multiple optimizations such as batching, caching and deduplication. 8 | icon: img/icon-feature-second.svg 9 | 10 | - title: Inspect 11 | description: The execution log of a Fetch can be inspected and it contains information about fetch scheduling, results and timings. 12 | icon: img/icon-feature-third.svg 13 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_data/menu.yml: -------------------------------------------------------------------------------- 1 | nav: 2 | - title: Documentation 3 | url: /docs 4 | 5 | - title: Github 6 | url: https://github.com/47degrees/fetch 7 | 8 | - title: License 9 | url: https://github.com/47degrees/fetch/blob/main/LICENSE 10 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_includes/_fetch-footer.html: -------------------------------------------------------------------------------- 1 | 19 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_includes/_fetch-head.html: -------------------------------------------------------------------------------- 1 | 2 | {{site.name}} 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_includes/_fetch-header.html: 
-------------------------------------------------------------------------------- 1 | 12 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_includes/_fetch-main.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |
4 | {% for item in site.data.features.content %} 5 |
6 | {{ item.title }} 7 |

{{ item.title }}

8 |

{{ item.description }}

9 |
10 | {% endfor %} 11 |
12 |
13 |
14 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_includes/_fetch-navigation.html: -------------------------------------------------------------------------------- 1 | 33 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/_layouts/fetch-home.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | {% include _fetch-head.html %} 4 | 5 | {% include _fetch-navigation.html %} 6 | {% include _fetch-header.html %} 7 | {% include _fetch-main.html %} 8 | {% include _fetch-footer.html %} 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/css/custom.scss: -------------------------------------------------------------------------------- 1 | --- 2 | # Custom 3 | --- 4 | 5 | @import "variables"; 6 | @import "breakpoint"; 7 | @import "header"; 8 | @import "navigation"; 9 | @import "main"; 10 | @import "footer"; 11 | @import "fetch_docs"; 12 | @import "components"; 13 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/custom-config.yml: -------------------------------------------------------------------------------- 1 | # ------------------------- 2 | # These settings will be merged with the properties coming from 3 | # sbt-microsites in order to generate the final and single _config.yml file 4 | # ------------------------- 5 | sass: 6 | load_paths: 7 | - _sass 8 | - static/_sass 9 | style: compressed 10 | # ------------------------- 11 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/favicon.ico: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/favicon.ico -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/favicon.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/icon-feature-first.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 25D019FF-9108-4554-BD30-CB92907FB96F 5 | Created with sketchtool. 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/icon-feature-second.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | D25D4B66-5D7A-4963-A20C-BF9210C2855E 5 | Created with sketchtool. 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/icon-feature-third.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | DB30719F-CD45-47DB-8887-3821AD36BF79 5 | Created with sketchtool. 
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/nav-icon-close.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | nav-icon-close 5 | Created with Sketch. 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/nav-icon-open.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | nav-icon-open 5 | Created with Sketch. 6 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/navbar_brand.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/navbar_brand.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/navbar_brand.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | navbar_brand 5 | Created with Sketch. 
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/navbar_brand2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/navbar_brand2x.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/pattern-background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/pattern-background.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/pattern-background.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | pattern-background 5 | Created with Sketch. 
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/pattern-background@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/pattern-background@2x.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/poster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/poster.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/sidebar_brand.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/sidebar_brand.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/sidebar_brand.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | FEE1BE49-5D79-4A02-818D-3B16F41CD269 5 | Created with sketchtool. 
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/sidebar_brand2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/sidebar_brand2x.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/img/twitter-card.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xebia-functional/fetch/5ab50dcbf202d77e1108f7a2f13dadfd42493031/microsite/src/main/resources/microsite/img/twitter-card.png -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/js/automenu.js: -------------------------------------------------------------------------------- 1 | jQuery(document).ready(function() { 2 | activeLinks(); 3 | organizeContent(); 4 | }); 5 | 6 | function organizeContent() { 7 | var content = $('#content'); 8 | var subcontent = $('
'); 9 | content.prepend(subcontent); 10 | content.find('h1').each(function(index) { 11 | var section = $('
'); 12 | subcontent.append(section); 13 | var h1 = $(this); 14 | var elements = h1.nextUntil('h1'); 15 | var text = h1.text(); 16 | var slug = slugify(text) + '-' + index; 17 | addSectionToSidebar(text, slug); 18 | section.append(makeSectionAnchor(h1, text, slug)); 19 | if (elements.length > 0) { 20 | elements.appendTo(section); 21 | organizeSubSection(slug, elements); 22 | } 23 | }); 24 | removeEmptyList(); 25 | } 26 | 27 | function organizeSubSection(s, children) { 28 | children.filter('h2').each(function(index, el) { 29 | var h2 = $(this); 30 | var text = h2.text(); 31 | var slug = s + '-' + slugify(text) + '-' + index; 32 | var a = makeSectionAnchor(h2, text, slug); 33 | addSubSectionToSidebar(text, slug, s); 34 | }); 35 | } 36 | 37 | function makeSectionAnchor(h, text, slug) { 38 | var a = $('').attr({ 39 | 'class': 'anchor', 40 | 'name': slug, 41 | 'href': '#' + slug 42 | }); 43 | a.append(h.clone()); 44 | h.replaceWith(a); 45 | return a; 46 | } 47 | 48 | function addSectionToSidebar(text, slug) { 49 | var ul = $('
    ').addClass('sub_section'); 50 | var a = $('' + text + ''); 51 | a.find('.fa-angle-right').css('padding-top', '0.7em'); 52 | var li = $('
  • '); 53 | li.append(a).append(ul); 54 | ul.hide(); 55 | $('#sidebar').append(li); 56 | a.click(function(event) { 57 | $('#sidebar li').add('#sidebar a').removeClass('active'); 58 | $('#sidebar .sub_section').not(ul).slideUp(); 59 | ul.slideToggle('fast'); 60 | li.add(a).toggleClass('active'); 61 | }); 62 | } 63 | 64 | function addSubSectionToSidebar(text, slug, s) { 65 | var ul = $('#sidebar li.' + s + ' ul'); 66 | var li = $('
  • ' + text + '
  • '); 67 | ul.append(li); 68 | } 69 | 70 | function removeEmptyList() { 71 | $('#sidebar>li').not('.sidebar-brand').each(function(index, el) { 72 | var li = $(this); 73 | var children = li.find('li'); 74 | if (children.size() == 0) { 75 | li.find('span').remove(); 76 | } 77 | }); 78 | } 79 | 80 | function slugify(text) { 81 | return text.toString().toLowerCase() 82 | .replace(/\s+/g, '-') // Replace spaces with - 83 | .replace(/[^\w\-]+/g, '') // Remove all non-word chars 84 | .replace(/\-\-+/g, '-') // Replace multiple - with single - 85 | .replace(/^-+/, '') // Trim - from start of text 86 | .replace(/-+$/, ''); // Trim - from end of text 87 | } 88 | 89 | function activeLinks() { 90 | $('a[data-href]').each(function(index, el) { 91 | $(this).attr('href', $(this).attr('data-href')); 92 | }); 93 | } 94 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_breakpoint.scss: -------------------------------------------------------------------------------- 1 | // Breakpoint 2 | // ----------------------------------------------- 3 | // ----------------------------------------------- 4 | @mixin bp($point) { 5 | @if $point==xlarge { 6 | @media (max-width: $bp-xlarge) { 7 | @content; 8 | } 9 | } 10 | 11 | @if $point==large { 12 | @media (max-width: $bp-large) { 13 | @content; 14 | } 15 | } 16 | 17 | @if $point==medium { 18 | @media (max-width: $bp-medium) { 19 | @content; 20 | } 21 | } 22 | 23 | @if $point==small { 24 | @media (max-width: $bp-small) { 25 | @content; 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_components.scss: -------------------------------------------------------------------------------- 1 | // COMPONENTS 2 | // ----------------------------------------------- 3 | // ----------------------------------------------- 4 | 5 | body { 6 | color: $brand-secondary; 7 
| font-family: $font-family-poppins; 8 | } 9 | 10 | #fetch-home { 11 | background-image: url('../img/pattern-background.svg'); 12 | background-repeat: no-repeat; 13 | } 14 | 15 | ol, 16 | ul { 17 | list-style: none; 18 | } 19 | 20 | // Buttons 21 | 22 | .btn { 23 | padding: 12px 40px; 24 | border-radius: 0; 25 | 26 | &:hover { 27 | background: $brand-secondary; 28 | } 29 | } 30 | 31 | .wrapper { 32 | padding: 0 ($base-point * 3); 33 | margin: 0 auto; 34 | box-sizing: border-box; 35 | max-width: $container-width; 36 | } 37 | 38 | .button { 39 | display: block; 40 | background: none; 41 | border: none; 42 | outline: none; 43 | text-decoration: none; 44 | position: relative; 45 | 46 | &:hover { 47 | cursor: pointer; 48 | } 49 | 50 | > img { 51 | vertical-align: bottom; 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_fetch_docs.scss: -------------------------------------------------------------------------------- 1 | // Docs 2 | // ----------------------------------------------- 3 | // ----------------------------------------------- 4 | #sidebar-wrapper { 5 | background-color: $white-color; 6 | border-right: 1px solid #CED8DC; 7 | overflow-x: hidden; 8 | 9 | #sidebar { 10 | .sidebar-brand { 11 | .brand { 12 | .brand-wrapper { 13 | span { 14 | margin-top: 0; 15 | letter-spacing: $base-point / 2; 16 | font-size: $base-point * 2; 17 | padding-left: $base-point * 2; 18 | } 19 | } 20 | } 21 | } 22 | } 23 | } 24 | 25 | .sidebar-nav { 26 | background-color: $white-color; 27 | > li { 28 | > a { 29 | color: $brand-secondary; 30 | &.active { 31 | background: $brand-tertiary; 32 | border-left: 3px solid $brand-primary; 33 | color: $brand-secondary; 34 | } 35 | &:hover { 36 | background: $brand-tertiary; 37 | color: $brand-secondary; 38 | } 39 | > span { 40 | display: none; 41 | } 42 | } 43 | ul { 44 | li { 45 | font-size: 14px; 46 | a { 47 | background: $brand-tertiary; 48 | 
color: darken($brand-tertiary, 50%); 49 | line-height: normal; 50 | padding-bottom: $base-point * 2; 51 | padding-top: $base-point * 2; 52 | &:hover, 53 | &:active { 54 | color: $brand-secondary; 55 | } 56 | } 57 | } 58 | } 59 | } 60 | > .sidebar-brand { 61 | a { 62 | background-color: $white-color; 63 | border-bottom: 1px solid $line-color; 64 | color: $brand-secondary; 65 | text-transform: uppercase; 66 | height: $docs-brand-height; 67 | 68 | &:hover { 69 | background: $brand-tertiary; 70 | color: $brand-secondary; 71 | } 72 | } 73 | } 74 | } 75 | 76 | 77 | #page-content-wrapper { 78 | background: $white-color; 79 | 80 | .nav { 81 | height: $docs-brand-height; 82 | max-height: $docs-brand-height; 83 | border-bottom: 1px solid $line-color; 84 | } 85 | 86 | section{ 87 | margin: 0; 88 | } 89 | 90 | pre .hljs { 91 | background-color: $brand-tertiary; 92 | } 93 | 94 | 95 | } 96 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_footer.scss: -------------------------------------------------------------------------------- 1 | // Footer 2 | // ----------------------------------------------- 3 | // ----------------------------------------------- 4 | #site-footer { 5 | height: 300px; 6 | padding: ($base-point * 10) 0; 7 | background: $brand-tertiary; 8 | color: $brand-secondary; 9 | 10 | a { 11 | color: $gray-primary; 12 | transition: color $base-duration $base-timing; 13 | 14 | &:visited { 15 | color: $gray-primary; 16 | } 17 | 18 | &:hover { 19 | color: darken($brand-tertiary, 50%); 20 | text-decoration: underline; 21 | } 22 | 23 | &:active { 24 | color: $gray-primary; 25 | } 26 | } 27 | 28 | p { 29 | color: lighten($brand-secondary, 3%) 30 | } 31 | 32 | .footer-flex { 33 | display: flex; 34 | justify-content: space-between; 35 | 36 | .footer-dev { 37 | width: $column-5; 38 | 39 | a { 40 | color: $gray-primary; 41 | 42 | &:visited { 43 | color: $gray-primary; 44 | } 45 | 46 | &:hover { 
47 | color: darken($brand-tertiary, 50%); 48 | } 49 | 50 | &:active { 51 | color: $gray-primary; 52 | } 53 | } 54 | } 55 | 56 | .footer-menu { 57 | display: flex; 58 | 59 | li { 60 | &:not(:last-child) { 61 | margin-right: ($base-point * 4) 62 | } 63 | } 64 | } 65 | } 66 | } 67 | 68 | // Responsive 69 | // ----------------------------------------------- 70 | 71 | @include bp(medium) { 72 | #site-footer { 73 | .footer-flex { 74 | justify-content: center; 75 | flex-wrap: wrap; 76 | 77 | .footer-dev, 78 | .footer-menu { 79 | width: $column-8; 80 | } 81 | 82 | .footer-dev { 83 | padding-bottom: ($base-point * 2); 84 | margin-bottom: ($base-point * 2); 85 | text-align: center; 86 | border-bottom: 1px solid $line-color; 87 | } 88 | 89 | 90 | .footer-menu { 91 | justify-content: center; 92 | 93 | li { 94 | &:not(:last-child) { 95 | margin-right: ($base-point * 2); 96 | } 97 | } 98 | } 99 | } 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_header.scss: -------------------------------------------------------------------------------- 1 | #site-header { 2 | padding: ($base-point * 18) 0 ($base-point * 9) 0; 3 | 4 | .header-container { 5 | display: flex; 6 | align-items: center; 7 | 8 | 9 | .header-text { 10 | width: $column-5; 11 | 12 | .title { 13 | color: $brand-secondary; 14 | font-weight: $font-semibold; 15 | font-size: 60px; 16 | margin-bottom: $base-point * 4; 17 | line-height: $base-point * 10; 18 | } 19 | 20 | .header-button { 21 | padding: ($base-point * 1.5) ($base-point * 6); 22 | display: inline-block; 23 | text-transform: uppercase; 24 | color: $brand-secondary; 25 | border: 2px solid $brand-secondary; 26 | background: none; 27 | letter-spacing: 1px; 28 | font-size: 14px; 29 | line-height: 21px; 30 | transition: color $base-duration $base-timing, background-color $base-duration $base-timing; 31 | text-decoration: none; 32 | 33 | &:visited { 34 | color: 
$brand-secondary; 35 | } 36 | 37 | &:hover { 38 | text-decoration: none; 39 | color: $white-color; 40 | background: $brand-secondary; 41 | } 42 | 43 | &:active { 44 | color: $white-color; 45 | background: rgba($brand-secondary, 0.8); 46 | 47 | } 48 | } 49 | } 50 | 51 | .header-image { 52 | width: $column-7; 53 | text-align: center; 54 | } 55 | } 56 | } 57 | 58 | #fetch-animation { 59 | width: 700px; 60 | } 61 | 62 | // Responsive 63 | // ----------------------------------------------- 64 | 65 | @include bp(large) { 66 | #site-header { 67 | .header-container { 68 | .header-text { 69 | h1 { 70 | font-size: 2.9rem; 71 | } 72 | } 73 | .header-image { 74 | img { 75 | width: 100%; 76 | } 77 | } 78 | } 79 | } 80 | } 81 | @include bp(medium) { 82 | #site-header { 83 | padding: ($base-point * 20) 0 0 0; 84 | .header-container { 85 | flex-direction: column; 86 | .header-text { 87 | text-align: center; 88 | width: $column-12; 89 | padding-bottom: ($base-point * 10); 90 | 91 | h1 { 92 | font-size: 2.5rem; 93 | } 94 | } 95 | .header-image { 96 | display: none; 97 | } 98 | } 99 | } 100 | #fetch-animation { 101 | display: none; 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_main.scss: -------------------------------------------------------------------------------- 1 | // Features 2 | // ----------------------------------------------- 3 | // ----------------------------------------------- 4 | 5 | #site-main { 6 | padding: ($base-point * 10) 0; 7 | 8 | .main-flex { 9 | display: flex; 10 | justify-content: space-between; 11 | 12 | .main-item { 13 | width: $column-4; 14 | text-align: center; 15 | 16 | &:not(:last-child) { 17 | margin-right: $gutter-margin; 18 | } 19 | 20 | h2, 21 | p { 22 | color: $text-color; 23 | } 24 | 25 | h2 { 26 | margin-bottom: $base-point * 3; 27 | font-size: 18px; 28 | text-transform: uppercase; 29 | font-weight: $font-medium; 30 | } 31 | p { 32 | 
font-size: 14px; 33 | } 34 | } 35 | } 36 | } 37 | 38 | // Responsive 39 | // ----------------------------------------------- 40 | @include bp(medium) { 41 | #site-main { 42 | .main-flex { 43 | flex-direction: column; 44 | .main-item { 45 | width: $column-12; 46 | 47 | &:not(:last-child) { 48 | margin-right: 0; 49 | margin-bottom: ($base-point * 8); 50 | } 51 | } 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_navigation.scss: -------------------------------------------------------------------------------- 1 | // Navigation 2 | #navigation { 3 | position: fixed; 4 | padding: 50px 0; 5 | z-index: 20; 6 | width: 100%; 7 | font-weight: $font-medium; 8 | transition: background-color $base-duration $base-timing, padding $base-duration $base-timing; 9 | 10 | .wrapper { 11 | max-width: $container-width; 12 | } 13 | 14 | &.nav-scroll { 15 | padding: ($base-point * 2) 0; 16 | background: rgba($brand-tertiary, 0.95); 17 | } 18 | 19 | .nav-flex { 20 | display: flex; 21 | justify-content: space-between; 22 | align-items: center; 23 | 24 | font-family: $font-family-poppins; 25 | font-size: 17px; 26 | text-transform: uppercase; 27 | letter-spacing: 1px; 28 | 29 | .nav-brand { 30 | display: flex; 31 | align-items: center; 32 | text-decoration: none; 33 | 34 | &:visited, 35 | &:hover, 36 | &:active { 37 | color: $brand-secondary; 38 | text-decoration: none; 39 | } 40 | span { 41 | margin-left: $base-point * 2; 42 | font-size: $base-point * 2; 43 | letter-spacing: $base-point / 2; 44 | } 45 | } 46 | 47 | .nav-menu { 48 | position: relative; 49 | 50 | ul { 51 | display: flex; 52 | 53 | list-style: none; 54 | display: flex; 55 | 56 | .nav-menu-item { 57 | &:not(:last-child) { 58 | margin-right: ($base-point * 5); 59 | } 60 | 61 | a { 62 | padding-bottom: 4px; 63 | color: $brand-secondary; 64 | font-size: 14px; 65 | text-decoration: none; 66 | 67 | &:hover { 68 | 
text-decoration: none; 69 | border-bottom: 2px solid $brand-primary; 70 | } 71 | } 72 | } 73 | } 74 | } 75 | 76 | .nav-icon-open { 77 | padding: 16px; 78 | margin: -16px; 79 | display: none; 80 | transition: transform $base-duration $base-timing; 81 | 82 | &:hover { 83 | transform: scaleX(1.5); 84 | } 85 | } 86 | 87 | .nav-icon-close { 88 | display: none; 89 | padding: 6px; 90 | position: absolute; 91 | background: rgba($brand-tertiary, 0.95); 92 | right: 100%; 93 | top: 32px; 94 | 95 | img { 96 | display: block; 97 | transition: transform .3s ease; 98 | 99 | &:hover { 100 | transform: rotate(180deg); 101 | } 102 | } 103 | } 104 | } 105 | } 106 | 107 | // Responsive 108 | // ----------------------------------------------- 109 | @include bp(medium) { 110 | #navigation { 111 | .nav-flex { 112 | .nav-menu { 113 | position: fixed; 114 | padding: ($base-point * 4) ($base-point); 115 | background: rgba($brand-tertiary, 0.95); 116 | height: 100%; 117 | right: -100%; 118 | top: 0; 119 | width: 50%; 120 | z-index: 2; 121 | transition: right $base-duration $base-timing; 122 | 123 | &.open { 124 | right: 0; 125 | } 126 | 127 | ul { 128 | flex-direction: column; 129 | 130 | .nav-menu-item { 131 | padding: $base-point 0; 132 | &:not(:last-child) { 133 | margin-right: 0; 134 | } 135 | } 136 | } 137 | 138 | } 139 | 140 | .nav-icon-open, 141 | .nav-icon-close { 142 | display: block; 143 | } 144 | } 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/_sass/_variables.scss: -------------------------------------------------------------------------------- 1 | // Variables 2 | // ----------------------------------------------- 3 | // ----------------------------------------------- 4 | 5 | // Google Fonts 6 | // ----------------------------------------------- 7 | @import url('https://fonts.googleapis.com/css?family=Poppins:400,500,600&display=swap'); 8 | 9 | // Colors 10 | // 
----------------------------------------------- 11 | $brand-primary: #DD4949; 12 | $brand-secondary: #104051; 13 | $brand-tertiary: #F6F8F8; 14 | $gray-primary: darken($brand-tertiary, 35%); 15 | $white-color: #FFF; 16 | $link-color: darken($brand-secondary, 10%); 17 | $link-hover: darken($brand-secondary, 15%); 18 | $line-color: #CED8DC; 19 | $text-color: $brand-secondary; 20 | 21 | // Sizes 22 | // ----------------------------------------------- 23 | $base-point: 8px; 24 | $container-width: 1140px; 25 | $docs-brand-height: 76px; 26 | // Typography 27 | $font-family-poppins: 'Poppins', sans-serif; 28 | 29 | $font-regular: 400; 30 | $font-medium: 500; 31 | $font-semibold: 600; 32 | 33 | 34 | // Breakpoint 35 | $bp-small: 480px; 36 | $bp-medium: 768px; 37 | $bp-large: 992px; 38 | $bp-xlarge: 1140px; 39 | // Animation 40 | // ----------------------------------------------- 41 | $base-duration: 250ms; 42 | $base-timing: ease-in-out; 43 | // Grid 44 | // ----------------------------------------------- 45 | $column-1: (1/12*100%); 46 | $column-2: (2/12*100%); 47 | $column-3: (3/12*100%); 48 | $column-4: (4/12*100%); 49 | $column-5: (5/12*100%); 50 | $column-6: (6/12*100%); 51 | $column-7: (7/12*100%); 52 | $column-8: (8/12*100%); 53 | $column-9: (9/12*100%); 54 | $column-10: (10/12*100%); 55 | $column-11: (11/12*100%); 56 | $column-12: (12/12*100%); 57 | $gutter-margin: ($base-point * 4); 58 | -------------------------------------------------------------------------------- /microsite/src/main/resources/microsite/static/home.js: -------------------------------------------------------------------------------- 1 | // This initialization requires that this script is loaded with `defer` 2 | const navElement = document.querySelector("#navigation"); 3 | 4 | /** 5 | * Toggle an specific class to the received DOM element. 6 | * @param {string} elemSelector The query selector specifying the target element. 
7 | * @param {string} [activeClass='active'] The class to be applied/removed. 8 | */ 9 | function toggleClass(elemSelector, activeClass = "active") { 10 | const elem = document.querySelector(elemSelector); 11 | if (elem) { 12 | elem.classList.toggle(activeClass); 13 | } 14 | } 15 | 16 | const scrollTop = Math.max(window.pageYOffset, document.documentElement.scrollTop, document.body.scrollTop) 17 | 18 | // Navigation element modification through scrolling 19 | function scrollFunction() { 20 | if (window.pageYOffset || document.documentElement.scrollTop > scrollTop) { 21 | navElement.classList.add("nav-scroll"); 22 | } else { 23 | navElement.classList.remove("nav-scroll"); 24 | } 25 | } 26 | 27 | // Init call 28 | function loadEvent() { 29 | document.addEventListener("scroll", scrollFunction); 30 | 31 | const lottieAnimation = bodymovin.loadAnimation({ 32 | container: document.getElementById('fetch-animation'), 33 | renderer: 'svg' / 'canvas' / 'html', 34 | loop: true, 35 | autoplay:true, 36 | path: 'static/fetch_animation.json' 37 | }) 38 | 39 | document.getElementById('fetch-animation').addEventListener('load', function() { 40 | lottieAnimation.play(); 41 | }); 42 | } 43 | 44 | // Attach the functions to each event they are interested in 45 | window.addEventListener("load", loadEvent); 46 | -------------------------------------------------------------------------------- /project/ProjectPlugin.scala: -------------------------------------------------------------------------------- 1 | import microsites.MicrositesPlugin.autoImport._ 2 | import com.typesafe.sbt.site.SitePlugin.autoImport._ 3 | import sbt.Keys._ 4 | import sbt._ 5 | import microsites._ 6 | import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ 7 | 8 | object ProjectPlugin extends AutoPlugin { 9 | 10 | override def trigger: PluginTrigger = allRequirements 11 | 12 | object autoImport { 13 | 14 | lazy val commonCrossDependencies = 15 | Seq( 16 | libraryDependencies ++= 17 | Seq( 18 | 
"org.typelevel" %%% "cats-effect" % "3.6.1", 19 | "org.scalatest" %%% "scalatest" % "3.2.19" % "test" 20 | ) 21 | ) 22 | 23 | lazy val micrositeSettings: Seq[Def.Setting[_]] = Seq( 24 | micrositeName := "Fetch", 25 | micrositeDescription := "Simple & Efficient data fetching", 26 | micrositeBaseUrl := "fetch", 27 | micrositeDocumentationUrl := "/fetch/docs", 28 | micrositeHighlightTheme := "tomorrow", 29 | micrositeExternalLayoutsDirectory := (Compile / resourceDirectory).value / "microsite" / "_layouts", 30 | micrositeExternalIncludesDirectory := (Compile / resourceDirectory).value / "microsite" / "_includes", 31 | micrositeDataDirectory := (Compile / resourceDirectory).value / "microsite" / "_data", 32 | micrositeTheme := "pattern", 33 | micrositePalette := Map( 34 | "brand-primary" -> "#DD4949", 35 | "brand-secondary" -> "#104051", 36 | "brand-tertiary" -> "#EFF2F3", 37 | "gray-dark" -> "#48474C", 38 | "gray" -> "#8D8C92", 39 | "gray-light" -> "#E3E2E3", 40 | "gray-lighter" -> "#F4F3F9", 41 | "white-color" -> "#FFFFFF" 42 | ), 43 | makeSite / includeFilter := "*.html" | "*.css" | "*.png" | "*.svg" | "*.jpg" | "*.gif" | "*.js" | "*.json" | "*.swf" | "*.md", 44 | micrositeGithubToken := Option(System.getenv().get("GITHUB_TOKEN")), 45 | micrositePushSiteWith := GitHub4s, 46 | micrositeConfigYaml := ConfigYml( 47 | yamlPath = Some((Compile / resourceDirectory).value / "microsite" / "custom-config.yml") 48 | ), 49 | micrositeCDNDirectives := CdnDirectives( 50 | cssList = List( 51 | "css/custom.css" 52 | ) 53 | ) 54 | ) 55 | 56 | lazy val docsSettings: Seq[Def.Setting[_]] = 57 | micrositeSettings ++ Seq( 58 | scalacOptions ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))), 59 | doc / aggregate := true 60 | ) 61 | 62 | lazy val examplesSettings = Seq( 63 | libraryDependencies ++= Seq( 64 | "io.circe" %% "circe-generic" % "0.14.13", 65 | "org.tpolecat" %% "doobie-core" % "1.0.0-RC9", 66 | "org.tpolecat" %% "doobie-h2" % "1.0.0-RC9", 67 | "org.tpolecat" %% 
"atto-core" % "0.9.5", 68 | "org.http4s" %% "http4s-blaze-client" % "0.23.17", 69 | "org.http4s" %% "http4s-circe" % "0.23.30", 70 | "redis.clients" % "jedis" % "6.0.0", 71 | "io.circe" %% "circe-parser" % "0.14.13" % Test, 72 | "org.slf4j" % "slf4j-simple" % "2.0.17" % Test 73 | ) 74 | ) ++ commonCrossDependencies 75 | } 76 | 77 | override def projectSettings: Seq[Def.Setting[_]] = 78 | Seq( 79 | scalacOptions := { 80 | val withStripedLinter = scalacOptions.value filterNot Set("-Xlint", "-Xfuture").contains 81 | (CrossVersion.partialVersion(scalaBinaryVersion.value) match { 82 | case Some((2, 13)) => withStripedLinter :+ "-Ymacro-annotations" 83 | case _ => withStripedLinter 84 | }) :+ "-language:higherKinds" 85 | }, 86 | libraryDependencies ++= { 87 | CrossVersion.partialVersion(scalaVersion.value) match { 88 | case Some((3, _)) => Seq() 89 | case _ => 90 | Seq( 91 | compilerPlugin("org.typelevel" % "kind-projector" % "0.13.3" cross CrossVersion.full), 92 | compilerPlugin("com.olegpy" %% "better-monadic-for" % "0.3.1") 93 | ) 94 | } 95 | }, 96 | developers := List( 97 | Developer( 98 | "47erbot", 99 | "Xebia", 100 | "developer.xf@xebia.com", 101 | url("https://xebia.com/") 102 | ) 103 | ), 104 | scalacOptions := Seq( 105 | "-unchecked", 106 | "-deprecation", 107 | "-feature", 108 | "-Ywarn-dead-code", 109 | "-language:higherKinds", 110 | "-language:existentials", 111 | "-language:postfixOps" 112 | ) ++ (CrossVersion.partialVersion(scalaVersion.value) match { 113 | case Some((3, _)) => Seq("-source:3.0-migration", "-Ykind-projector") 114 | case Some((2, 13)) => Seq("-Ywarn-dead-code") 115 | case _ => Seq("-Ywarn-dead-code", "-Ypartial-unification") 116 | }) 117 | ) 118 | 119 | } 120 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.11.2 2 | 
-------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.11.1") 2 | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2") 3 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.19.0") 4 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4") 5 | addSbtPlugin("com.47deg" % "sbt-microsites" % "1.4.4") 6 | addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.7.1") 7 | addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.10.0") 8 | addSbtPlugin("com.alejandrohdezma" %% "sbt-github" % "0.12.0") 9 | addSbtPlugin("com.alejandrohdezma" % "sbt-github-header" % "0.12.0") 10 | addSbtPlugin("com.alejandrohdezma" % "sbt-github-mdoc" % "0.12.0") 11 | addSbtPlugin("com.alejandrohdezma" % "sbt-mdoc-toc" % "0.4.2") 12 | addSbtPlugin("com.alejandrohdezma" % "sbt-remove-test-from-pom" % "0.1.0") 13 | --------------------------------------------------------------------------------