├── .yarnrc ├── .babelrc ├── .travis.yml ├── .eslintignore ├── .gitignore ├── .prettierignore ├── commitlint.config.js ├── .eslintrc ├── LICENSE ├── package.json ├── CHANGELOG.md ├── src ├── index.js └── __tests__ │ └── index.js └── README.md /.yarnrc: -------------------------------------------------------------------------------- 1 | version-git-message "release: v%s" 2 | -------------------------------------------------------------------------------- /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["@babel/env"] 3 | } 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | cache: yarn 3 | node_js: 4 | - 10 5 | - 8 6 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | # dependencies 2 | node_modules 3 | 4 | # testing 5 | coverage 6 | 7 | # production artifacts 8 | build 9 | dist 10 | lib 11 | 12 | # non-lintable JS/JSON 13 | /packages/generator-*/app/templates 14 | /packages/generator-*/generators/app/templates 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See http://help.github.com/ignore-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | node_modules 5 | 6 | # testing 7 | coverage 8 | 9 | # production 10 | build 11 | dist 12 | lib 13 | 14 | # misc 15 | .DS_Store 16 | *.log 17 | *.tgz 18 | .idea 19 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # `yarn` has its own opinions on `package.json`, so `prettier` should ignore it 2 | package.json 3 | 4 | # Generator templates, which aren't valid JS until compiled 5 | templates 6 | 7 | # Non-committed directories 8 | node_modules 9 | coverage 10 | dist 11 | lib 12 | 13 | # Generated Markdown files 14 | CHANGELOG.md 15 | -------------------------------------------------------------------------------- /commitlint.config.js: -------------------------------------------------------------------------------- 1 | const typeEnumRules = require("@commitlint/config-angular-type-enum"); 2 | 3 | const typeEnum = typeEnumRules.rules["type-enum"]; 4 | 5 | // Add any additional commit types 6 | typeEnum[2].push("release"); 7 | 8 | module.exports = { 9 | extends: ["@commitlint/config-angular"], 10 | rules: { 11 | "type-enum": typeEnum 12 | } 13 | }; 14 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": ["json"], 3 | "extends": ["eslint:recommended", "prettier"], 4 | "parser": "babel-eslint", 5 | "parserOptions": { 6 | "ecmaVersion": 6, 7 | "sourceType": "module", 8 | "ecmaFeatures": { 9 | "jsx": true, 10 | "generators": true, 11 | "experimentalObjectRestSpread": true 12 | } 13 | }, 14 | "env": { 15 | "browser": true, 16 | "commonjs": true, 17 | "es6": true, 18 | "node": true, 19 | "jest": true 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT 
License 2 | 3 | Copyright (c) 2019 Chris Villa 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "combine-pagination", 3 | "version": "0.3.3", 4 | "description": "A JavaScript library to paginate across multiple data sources at once, whilst retaining the sort order.", 5 | "homepage": "https://github.com/chrisvxd/combine-pagination", 6 | "license": "MIT", 7 | "repository": { 8 | "type": "git", 9 | "url": "https://github.com/chrisvxd/combine-pagination.git" 10 | }, 11 | "bugs": { 12 | "url": "https://github.com/chrisvxd/combine-pagination/issues" 13 | }, 14 | "main": "dist/index.js", 15 | "files": [ 16 | "dist/" 17 | ], 18 | "scripts": { 19 | "compile": "babel src -d dist", 20 | "cz": "git-cz", 21 | "format": "yarn run internal:prettier --write", 22 | "internal:prettier": "prettier \"./*.{js,json,md}\" \"./**/*.{js,json,md}\"", 23 | "lint": "npm-run-all --parallel lint:*", 24 | "lint:js": "eslint --ignore-path .eslintignore .", 25 | "lint:json": "eslint --ignore-path .eslintignore --ext .json .", 26 | "lint:md": "remark --quiet --frail .", 27 | "lint:format": "yarn run internal:prettier --list-different", 28 | "precompile": "if [ ${SKIP_CLEANUP:-0} -ne 1 ]; then rimraf lib/*; fi", 29 | "prepublishOnly": "yarn compile", 30 | "test": "jest src --modulePathIgnorePatterns \"/dist/\"", 31 | "release": "conventional-recommended-bump -p angular | xargs yarn version --new-version$1", 32 | "version": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md" 33 | }, 34 | "remarkConfig": { 35 | "presets": [ 36 | "lint-consistent", 37 | "lint-recommended" 38 | ], 39 | "plugins": { 40 | "lint": { 41 | "list-item-indent": "space", 42 | "heading-increment": true 43 | } 44 | } 45 | }, 46 | "dependencies": { 47 | "@babel/polyfill": "^7.2.5" 48 | }, 49 | "devDependencies": { 50 | "@babel/cli": "^7.2.3", 51 | "@babel/core": "^7.3.4", 52 | "@babel/preset-env": "^7.3.4", 53 | "@commitlint/cli": "^7.5.2", 54 | "@commitlint/config-angular": "^7.5.0", 55 | "babel-eslint": "^10.0.1", 56 | "babel-jest": "^24.4.0", 57 | "casual": "^1.6.0", 58 | "commitizen": "^3.0.7", 59 | "conventional-changelog-cli": "^2.0.12", 60 | "conventional-recommended-bump": "^4.0.4", 61 | "eslint": "^5.15.1", 62 | "eslint-config-prettier": "^4.1.0", 63 | "eslint-plugin-json": "~1.4.0", 
64 | "jest": "^24.4.0", 65 | "npm-run-all": "^4.1.5", 66 | "prettier": "~1.16.4", 67 | "remark-cli": "^6.0.1", 68 | "remark-lint": "^6.0.4", 69 | "remark-preset-lint-consistent": "2.0.2", 70 | "remark-preset-lint-recommended": "3.0.2", 71 | "rimraf": "^2.6.3" 72 | }, 73 | "config": { 74 | "commitizen": { 75 | "path": "cz-conventional-changelog" 76 | } 77 | }, 78 | "jest": { 79 | "collectCoverage": true, 80 | "collectCoverageFrom": [ 81 | "/src/**/*.{js,jsx,ts,tsx}" 82 | ] 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [0.3.3](https://github.com/chrisvxd/combine-pagination/compare/v0.3.2...v0.3.3) (2019-10-09) 2 | 3 | 4 | ### Bug Fixes 5 | 6 | * fix ascending sortDirection ([cae37b8](https://github.com/chrisvxd/combine-pagination/commit/cae37b8)) 7 | 8 | 9 | 10 | ## [0.3.2](https://github.com/chrisvxd/combine-pagination/compare/v0.3.1...v0.3.2) (2019-07-12) 11 | 12 | 13 | 14 | ## [0.3.1](https://github.com/chrisvxd/combine-pagination/compare/v0.3.0...v0.3.1) (2019-05-08) 15 | 16 | 17 | ### Bug Fixes 18 | 19 | * remove overly aggressive error that would trigger unexpectedly ([7169181](https://github.com/chrisvxd/combine-pagination/commit/7169181)) 20 | 21 | 22 | 23 | # [0.3.0](https://github.com/chrisvxd/combine-pagination/compare/v0.2.0...v0.3.0) (2019-05-07) 24 | 25 | 26 | ### Features 27 | 28 | * add support for custom sort method ([f8f7d24](https://github.com/chrisvxd/combine-pagination/commit/f8f7d24)) 29 | * export helper method for algolia's default relevance ranking ([1b3da92](https://github.com/chrisvxd/combine-pagination/commit/1b3da92)) 30 | * support nested sort keys ([aa0ceae](https://github.com/chrisvxd/combine-pagination/commit/aa0ceae)) 31 | * throw error if results are out of order ([7757b0c](https://github.com/chrisvxd/combine-pagination/commit/7757b0c)) 32 | 33 | 34 | 35 | # [0.2.0](https://github.com/chrisvxd/combine-pagination/compare/v0.1.0...v0.2.0) (2019-03-15) 36 | 37 | 38 | ### Features 39 | 40 | * add extractState and injectState methods for SSR ([ca35af1](https://github.com/chrisvxd/combine-pagination/commit/ca35af1)) 41 | * add method to get results for specific getter, using shared cache with getNext ([b41312a](https://github.com/chrisvxd/combine-pagination/commit/b41312a)) 42 | * hide private methods from export ([a29cbb3](https://github.com/chrisvxd/combine-pagination/commit/a29cbb3)) 43 | * support passing options into getNext() ([745063e](https://github.com/chrisvxd/combine-pagination/commit/745063e)) 44 | 45 | 46 | 47 | # [0.1.0](https://github.com/chrisvxd/combine-pagination/compare/ce525d2...v0.1.0) (2019-03-13) 48 | 49 | 50 | ### Bug Fixes 51 | 52 | * always return last hit during trimPage ([6760af3](https://github.com/chrisvxd/combine-pagination/commit/6760af3)) 53 | * capture all edge cases and replace all tests with getNext() tests ([295f832](https://github.com/chrisvxd/combine-pagination/commit/295f832)) 54 | * compare non-algolia results by using JSON stringify ([cf9f788](https://github.com/chrisvxd/combine-pagination/commit/cf9f788)) 55 | * don't duplicate hits ([86b9fd0](https://github.com/chrisvxd/combine-pagination/commit/86b9fd0)) 56 | * ensure results are always sorted ([8b29e64](https://github.com/chrisvxd/combine-pagination/commit/8b29e64)) 57 | * fix async by adding @babel/polyfill ([f871a66](https://github.com/chrisvxd/combine-pagination/commit/f871a66)) 58 | * fix sorting of last page 
([15ae8e7](https://github.com/chrisvxd/combine-pagination/commit/15ae8e7)) 59 | * return results even if one getter returns an empty array ([3759ae6](https://github.com/chrisvxd/combine-pagination/commit/3759ae6)) 60 | 61 | 62 | ### Features 63 | 64 | * add initial code without tests ([ce525d2](https://github.com/chrisvxd/combine-pagination/commit/ce525d2)) 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import "@babel/polyfill"; 2 | 3 | const get = (obj, accessor) => accessor.split(".").reduce((o, i) => o[i], obj); 4 | 5 | /* 6 | * sortAlgolia(a: object, b: object, includeGeo: bool) 7 | * 8 | * Custom sort method for algolia searches using the default algolia search relevance 9 | **/ 10 | export const sortAlgolia = (a, b, includeGeo = true) => { 11 | const rankings = []; 12 | 13 | rankings.push({ name: "nbTypos", goal: "lowest" }); 14 | 15 | // Only compare geo if not remote 16 | if (includeGeo) { 17 | rankings.push({ name: "geoDistance", goal: "lowest" }); 18 | rankings.push({ name: "geoPrecision", goal: "highest" }); 19 | } 20 | 21 | rankings.push({ name: "firstMatchedWord", goal: "lowest" }); 22 | rankings.push({ name: "words", goal: "highest" }); 23 | rankings.push({ name: "filters", goal: "highest" }); 24 | rankings.push({ name: "proximityDistance", goal: "lowest" }); 25 | rankings.push({ name: "nbExactWords", goal: "highest" }); 26 | rankings.push({ name: "userScore", goal: "highest" }); 27 | 28 | for (let index = 0; index < rankings.length; index++) { 29 | const { name, goal } = rankings[index]; 30 | 31 | const score = 32 | goal === "lowest" 33 | ? a._rankingInfo[name] - b._rankingInfo[name] 34 | : b._rankingInfo[name] - a._rankingInfo[name]; 35 | 36 | if (score !== 0) { 37 | return score; 38 | } 39 | } 40 | 41 | return 0; 42 | }; 43 | 44 | /* 45 | * combinePagination() 46 | * */ 47 | export default ({ getters, sortKey, sort, sortDirection = "desc" }) => { 48 | let state = { 49 | pages: getters.map(() => []), 50 | getNext: { 51 | data: getters.map(() => []), 52 | meta: {}, 53 | nextPageForGetters: getters.map(() => 0) 54 | }, 55 | getNextForGetter: { 56 | nextPageForGetters: getters.map(() => 0) 57 | } 58 | }; 59 | 60 | const _getSortKey = hit => get(hit, sortKey); 61 | 62 | const _isAfter = (a, b, { eq = true } = {}) => { 63 | if (sort) { 64 | return eq ? sort(a, b) >= 0 : sort(a, b) > 0; 65 | } 66 | 67 | if (sortDirection === "asc") { 68 | return eq 69 | ? _getSortKey(a) >= _getSortKey(b) 70 | : _getSortKey(a) > _getSortKey(b); 71 | } 72 | 73 | return eq 74 | ? _getSortKey(a) <= _getSortKey(b) 75 | : _getSortKey(a) < _getSortKey(b); 76 | }; 77 | 78 | const _isBefore = (a, b, { eq = true } = {}) => { 79 | if (sort) { 80 | return eq ? sort(a, b) <= 0 : sort(a, b) < 0; 81 | } 82 | 83 | if (sortDirection === "asc") { 84 | return eq 85 | ? _getSortKey(a) <= _getSortKey(b) 86 | : _getSortKey(a) < _getSortKey(b); 87 | } 88 | 89 | return eq 90 | ? _getSortKey(a) >= _getSortKey(b) 91 | : _getSortKey(a) > _getSortKey(b); 92 | }; 93 | 94 | const _sortPage = hits => 95 | sort 96 | ? hits.sort(sort) 97 | : hits.sort((a, b) => 98 | sortDirection === "asc" 99 | ? 
get(a, sortKey) - get(b, sortKey) 100 | : get(b, sortKey) - get(a, sortKey) 101 | ); 102 | 103 | const _mergeData = data => 104 | _sortPage( 105 | data.reduce((acc, hitsForGetter) => [...acc, ...hitsForGetter], []) 106 | ); 107 | 108 | const _trimPage = ({ page, meta }) => { 109 | const { earliestLastHit, firstHit } = meta; 110 | 111 | const trimmedPage = page.filter( 112 | hit => _isAfter(hit, firstHit) && _isBefore(hit, earliestLastHit) 113 | ); 114 | 115 | return trimmedPage; 116 | }; 117 | 118 | const _getMeta = ({ currentMeta, results }) => { 119 | const lastHitForGetter = results[results.length - 1]; 120 | const { 121 | earliestLastHit = lastHitForGetter, 122 | firstHit = results[0] 123 | } = currentMeta; 124 | 125 | return { 126 | firstHit: _isBefore(results[0], firstHit, { eq: false }) 127 | ? results[0] 128 | : firstHit, 129 | earliestLastHit: _isBefore(lastHitForGetter, earliestLastHit, { 130 | eq: false 131 | }) 132 | ? lastHitForGetter 133 | : earliestLastHit 134 | }; 135 | }; 136 | 137 | const _shouldProcessPage = ({ data, page, getterIndex }) => 138 | data[getterIndex].length === 0 && page !== null; 139 | 140 | // NB Not hugely efficient 141 | const _tidyData = ({ data, trimmedPage }) => 142 | data.reduce( 143 | (acc, getterData) => [ 144 | ...acc, 145 | getterData.filter(hit => trimmedPage.indexOf(hit) === -1) 146 | ], 147 | [] 148 | ); 149 | 150 | const _getData = async ({ getterIndex, page, userOptions }) => { 151 | const getter = getters[getterIndex]; 152 | const cachedPage = state.pages[getterIndex][page]; 153 | 154 | let results; 155 | 156 | if (cachedPage) { 157 | results = cachedPage; 158 | } else { 159 | results = await getter(page, userOptions); 160 | state.pages[getterIndex].push(results); 161 | } 162 | 163 | return results; 164 | }; 165 | 166 | const getNext = async userOptions => { 167 | // We recalculate these meta params on each page 168 | state.getNext.meta.firstHit = undefined; 169 | state.getNext.meta.earliestLastHit = undefined; 170 | 171 | for (let getterIndex = 0; getterIndex < getters.length; getterIndex++) { 172 | const page = state.getNext.nextPageForGetters[getterIndex]; 173 | 174 | if ( 175 | _shouldProcessPage({ 176 | data: state.getNext.data, 177 | page, 178 | getterIndex 179 | }) 180 | ) { 181 | const results = await _getData({ 182 | getterIndex, 183 | page, 184 | userOptions 185 | }); 186 | 187 | if (results.length > 0) { 188 | state.getNext.data[getterIndex] = results; 189 | state.getNext.nextPageForGetters[getterIndex] = page + 1; 190 | state.getNext.meta = _getMeta({ 191 | currentMeta: state.getNext.meta, 192 | getterIndex, 193 | results 194 | }); 195 | } else { 196 | state.getNext.nextPageForGetters[getterIndex] = null; 197 | } 198 | } else { 199 | if (state.getNext.data[getterIndex].length > 0) { 200 | state.getNext.meta = _getMeta({ 201 | currentMeta: state.getNext.meta, 202 | getterIndex, 203 | results: state.getNext.data[getterIndex] 204 | }); 205 | } 206 | } 207 | } 208 | 209 | const page = _mergeData(state.getNext.data); 210 | 211 | const trimmedPage = 212 | page.length > 0 213 | ? 
_trimPage({ 214 | page, 215 | meta: state.getNext.meta 216 | }) 217 | : page; 218 | 219 | state.getNext.data = _tidyData({ data: state.getNext.data, trimmedPage }); 220 | 221 | return trimmedPage; 222 | }; 223 | 224 | const getNextForGetter = async (getterIndex, userOptions) => { 225 | const page = state.getNextForGetter.nextPageForGetters[getterIndex]; 226 | 227 | if (page === null) { 228 | return []; 229 | } 230 | 231 | const results = await _getData({ 232 | getterIndex, 233 | page, 234 | userOptions 235 | }); 236 | 237 | if (results.length === 0) { 238 | state.getNextForGetter.nextPageForGetters[getterIndex] = null; 239 | } else { 240 | state.getNextForGetter.nextPageForGetters[getterIndex] = page + 1; 241 | } 242 | 243 | return results; 244 | }; 245 | 246 | const extractState = () => { 247 | return state; 248 | }; 249 | 250 | const injectState = async injectedState => { 251 | state = injectedState; 252 | }; 253 | 254 | return { 255 | getNext, 256 | getNextForGetter, 257 | extractState, 258 | injectState 259 | }; 260 | }; 261 | -------------------------------------------------------------------------------- /src/__tests__/index.js: -------------------------------------------------------------------------------- 1 | import combinePagination from "../index"; 2 | import casual from "casual"; 3 | 4 | casual.seed(10); 5 | 6 | const modernHats = [ 7 | { 8 | name: "Baseball Cap", 9 | sorting: { popularity: 95 } 10 | }, 11 | { 12 | name: "Beanie", 13 | sorting: { popularity: 70 } 14 | }, 15 | { 16 | name: "Golf", 17 | sorting: { popularity: 20 } 18 | }, 19 | { 20 | name: "Other", 21 | sorting: { popularity: 10 } 22 | } 23 | ]; 24 | 25 | const oldHats = [ 26 | { 27 | name: "Top Hat", 28 | sorting: { popularity: 60 } 29 | }, 30 | { 31 | name: "Beret", 32 | sorting: { popularity: 15 } 33 | }, 34 | { 35 | name: "Bowler Cap", 36 | sorting: { popularity: 9 } 37 | }, 38 | { 39 | name: "Sombrero", 40 | sorting: { popularity: 5 } 41 | }, 42 | { 43 | name: "Stetson", 44 | sorting: { popularity: 2 } 45 | } 46 | ]; 47 | 48 | const getData = (data, page, pageSize = 3) => 49 | data.slice(page * pageSize, (page + 1) * pageSize); 50 | 51 | describe("combine-paginators", () => { 52 | let combinedGetters; 53 | 54 | beforeEach(() => { 55 | combinedGetters = combinePagination({ 56 | getters: [ 57 | page => getData(modernHats, page), 58 | page => getData(oldHats, page) 59 | ], 60 | sortKey: "sorting.popularity" 61 | }); 62 | }); 63 | 64 | describe("test data", () => { 65 | it("is valid test data, generating out of order results", () => { 66 | expect([ 67 | ...[...getData(modernHats, 0), ...getData(oldHats, 0)].sort( 68 | (a, b) => b.sorting.popularity - a.sorting.popularity 69 | ), 70 | ...[...getData(modernHats, 1), ...getData(oldHats, 1)].sort( 71 | (a, b) => b.sorting.popularity - a.sorting.popularity 72 | ) 73 | ]).toEqual([ 74 | modernHats[0], 75 | modernHats[1], 76 | oldHats[0], 77 | modernHats[2], 78 | oldHats[1], 79 | oldHats[2], 80 | modernHats[3], 81 | oldHats[3], 82 | oldHats[4] 83 | ]); 84 | }); 85 | }); 86 | 87 | describe("getNext", () => { 88 | it("get intersecting hits for first page of known data set", async () => { 89 | const page = await combinedGetters.getNext(); 90 | 91 | expect(page).toEqual([ 92 | modernHats[0], 93 | modernHats[1], 94 | oldHats[0], 95 | modernHats[2] 96 | ]); 97 | }); 98 | 99 | it("get intersecting hits for second page of known data set", async () => { 100 | await combinedGetters.getNext(); 101 | const page = await combinedGetters.getNext(); 102 | 103 | 
expect(page).toEqual([oldHats[1], modernHats[3]]); 104 | }); 105 | 106 | it("get hits for third page of known data set", async () => { 107 | await combinedGetters.getNext(); 108 | await combinedGetters.getNext(); 109 | const page = await combinedGetters.getNext(); 110 | 111 | expect(page).toEqual([oldHats[2]]); 112 | }); 113 | 114 | it("get trailing hits for fourth page of known data set", async () => { 115 | await combinedGetters.getNext(); 116 | await combinedGetters.getNext(); 117 | await combinedGetters.getNext(); 118 | const page = await combinedGetters.getNext(); 119 | 120 | expect(page).toEqual([oldHats[3], oldHats[4]]); 121 | }); 122 | 123 | it("return empty array when known data set is exhausted", async () => { 124 | await combinedGetters.getNext(); 125 | await combinedGetters.getNext(); 126 | await combinedGetters.getNext(); 127 | await combinedGetters.getNext(); 128 | const page = await combinedGetters.getNext(); 129 | 130 | expect(page).toEqual([]); 131 | }); 132 | 133 | it("should return data in correct order when using ascending sort direction", async () => { 134 | const reversedModernHats = [...modernHats].reverse(); 135 | const reversedOldHats = [...oldHats].reverse(); 136 | 137 | const combinedGettersWithSorting = combinePagination({ 138 | getters: [ 139 | page => getData(reversedModernHats, page), 140 | page => getData(reversedOldHats, page) 141 | ], 142 | sortKey: "sorting.popularity", 143 | sortDirection: "asc" 144 | }); 145 | 146 | const page = await combinedGettersWithSorting.getNext(); 147 | 148 | expect(page).toEqual([ 149 | reversedOldHats[0], 150 | reversedOldHats[1], 151 | reversedOldHats[2] 152 | ]); 153 | 154 | const nextPage = await combinedGettersWithSorting.getNext(); 155 | 156 | expect(nextPage).toEqual([ 157 | reversedModernHats[0], 158 | reversedOldHats[3], 159 | reversedModernHats[1], 160 | reversedOldHats[4] 161 | ]); 162 | }); 163 | 164 | // Randomly generating data sets ensures robustness against edge cases 165 | it("should return all results in order for 1000 random data sets", async () => { 166 | const minimumLength = 1; 167 | const maximumLength = 500; 168 | const minimumPopularity = 0; 169 | const maximumPopularity = 1000; 170 | const minimumPageSize = 1; 171 | const maximumPageSize = 100; 172 | const minimumNumberOfDataSets = 2; 173 | const maximumNumberOfDataSets = 5; 174 | 175 | for (let index = 0; index < 1000; index++) { 176 | // We ensure same maximum popularity is kept throughout run 177 | // This causes data to get distributed 178 | const maximumPopularityForRun = casual.integer( 179 | minimumPopularity, 180 | maximumPopularity 181 | ); 182 | 183 | const numberDataSets = casual.integer( 184 | minimumNumberOfDataSets, 185 | maximumNumberOfDataSets 186 | ); 187 | 188 | const dataSets = Array.from( 189 | { 190 | length: numberDataSets 191 | }, 192 | () => 193 | Array.from( 194 | { length: casual.integer(minimumLength, maximumLength) }, 195 | () => ({ 196 | sorting: { 197 | popularity: casual.integer( 198 | minimumPopularity, 199 | maximumPopularityForRun 200 | ) 201 | } 202 | }) 203 | ).sort((a, b) => b.sorting.popularity - a.sorting.popularity) 204 | ); 205 | 206 | const pageSizeForDataSets = Array.from( 207 | { 208 | length: numberDataSets 209 | }, 210 | () => casual.integer(minimumPageSize, maximumPageSize) 211 | ); 212 | 213 | const expectedResult = dataSets 214 | .reduce((acc, dataSet) => [...acc, ...dataSet], []) 215 | .sort((a, b) => b.sorting.popularity - a.sorting.popularity); 216 | 217 | const getters = dataSets.map((dataSet, index) 
=> page => 218 | getData(dataSet, page, pageSizeForDataSets[index]) 219 | ); 220 | 221 | const combined = combinePagination({ 222 | getters, 223 | sortKey: "sorting.popularity" 224 | }); 225 | 226 | let lastResult; 227 | let allResults = []; 228 | 229 | while (lastResult !== []) { 230 | lastResult = await combined.getNext(); 231 | 232 | if (lastResult.length === 0) { 233 | break; 234 | } 235 | 236 | allResults = [...allResults, ...lastResult]; 237 | } 238 | 239 | expect(allResults).toEqual(expectedResult); 240 | } 241 | }); 242 | }); 243 | 244 | describe("getNextForGetter", () => { 245 | it("should get next results for a specific getter", async () => { 246 | const page = await combinedGetters.getNextForGetter(0); 247 | 248 | expect(page).toEqual([modernHats[0], modernHats[1], modernHats[2]]); 249 | }); 250 | 251 | it("should get next results from a specific getter, regardless of the getNext state", async () => { 252 | await combinedGetters.getNext(); 253 | const page = await combinedGetters.getNextForGetter(0); 254 | 255 | expect(page).toEqual([modernHats[0], modernHats[1], modernHats[2]]); 256 | }); 257 | 258 | it("should get results for a specific getter, without interfering with getNext", async () => { 259 | await combinedGetters.getNext(); 260 | await combinedGetters.getNextForGetter(1); 261 | await combinedGetters.getNextForGetter(1); 262 | const page = await combinedGetters.getNext(); 263 | 264 | expect(page).toEqual([oldHats[1], modernHats[3]]); 265 | }); 266 | 267 | it("should return [] when next page is null", async () => { 268 | await combinedGetters.injectState({ 269 | getNextForGetter: { nextPageForGetters: [null, null] } 270 | }); 271 | 272 | const page = await combinedGetters.getNextForGetter(1); 273 | 274 | expect(page).toEqual([]); 275 | }); 276 | 277 | it("should return [] when pages are exhausted", async () => { 278 | await combinedGetters.getNextForGetter(1); 279 | await combinedGetters.getNextForGetter(1); 280 | 281 | const page = await combinedGetters.getNextForGetter(1); 282 | 283 | expect(page).toEqual([]); 284 | }); 285 | }); 286 | }); 287 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # combine-pagination 🦑 2 | 3 | [![NPM](https://img.shields.io/npm/v/combine-pagination.svg)](https://www.npmjs.com/package/combine-pagination) [![JavaScript Style Guide](https://img.shields.io/badge/code_style-prettier-brightgreen.svg)](https://prettier.io) 4 | 5 | `combine-pagination` is a JavaScript library for paginating across multiple data sources at once, whilst retaining the sort order. 6 | 7 | - **Great for Infinity Scroll**: easily support multiple data sources in your infinity scroll. 8 | - **Retain order**: your data is always in order, even when it comes from different sources. 9 | - **Service agnostic**: work with any data service, whether REST, GraphQL, Algolia or another. 10 | - **Mix-and-match services**: mix data services as you see fit, making one query from GraphQL and one from Algolia. 11 | - **Efficient**: only fetch data when needed for that data source. 12 | 13 | Used in production at https://wellpaid.io. 
14 | 
15 | ## Installation
16 | 
17 | ```sh
18 | npm i combine-pagination --save
19 | ```
20 | 
21 | or
22 | 
23 | ```sh
24 | yarn add combine-pagination
25 | ```
26 | 
27 | ## Quick examples
28 | 
29 | If you already understand [the problem](#the-problem), here are some quick examples for paginating across multiple data sets in different scenarios.
30 | 
31 | ### Generic
32 | 
33 | Paginate data from two generic data sets.
34 | 
35 | ```js
36 | const combinedGetters = combinePagination({
37 |   getters: [page => getDataA(page), page => getDataB(page)],
38 |   sortKey: "popularity"
39 | });
40 | 
41 | const pageOne = await combinedGetters.getNext();
42 | const pageTwo = await combinedGetters.getNext();
43 | ```
44 | 
45 | ### Algolia - custom sorting
46 | 
47 | Paginate data from two distinct Algolia queries where the Algolia index is sorted by a custom field. Each query uses a different keyword.
48 | 
49 | ```js
50 | const index = algoliasearch({
51 |   //...
52 | }).initIndex("hats");
53 | 
54 | const combinedGetters = combinePagination({
55 |   getters: [
56 |     async page =>
57 |       (await index.query({ page, hitsPerPage: 15, query: "Baseball cap" }))
58 |         .hits,
59 |     async page =>
60 |       (await index.query({ page, hitsPerPage: 15, query: "Top hat" })).hits
61 |   ],
62 |   sortKey: "popularity"
63 | });
64 | 
65 | const pageOne = await combinedGetters.getNext();
66 | const pageTwo = await combinedGetters.getNext();
67 | ```
68 | 
69 | ### Algolia - default sorting
70 | 
71 | Paginate data from two distinct Algolia queries where the Algolia index is sorted using Algolia's default criteria.
72 | 
73 | This approach uses a custom `sort` method that attempts to match [Algolia's sorting algorithm](https://www.algolia.com/doc/guides/managing-results/must-do/custom-ranking/?language=javascript#the-ranking-criteria).
74 | 
75 | > The `sortAlgolia` sort method used in this example is experimental. You might need to implement your own if using a custom ranking method.
76 | 
77 | ```js
78 | import { sortAlgolia } from "combine-pagination";
79 | 
80 | const index = algoliasearch({
81 |   getRankingInfo: true // Ask Algolia for ranking info
82 |   //...
83 | }).initIndex("hats");
84 | 
85 | const combinedGetters = combinePagination({
86 |   getters: [
87 |     async page =>
88 |       (await index.query({ page, hitsPerPage: 15, query: "Baseball cap" }))
89 |         .hits,
90 |     async page =>
91 |       (await index.query({ page, hitsPerPage: 15, query: "Top hat" })).hits
92 |   ],
93 |   sort: sortAlgolia
94 | });
95 | ```
96 | 
97 | ## The Problem
98 | 
99 | Suppose you have two data sets, `modernHats` and `oldHats`, and you want to combine them into one data set sorted by `popularity` called `hats`:
100 | 
101 | ```js
102 | const modernHats = [
103 |   {
104 |     name: "Baseball Cap",
105 |     popularity: 95
106 |   },
107 |   {
108 |     name: "Beanie",
109 |     popularity: 50
110 |   },
111 |   {
112 |     name: "Flat Cap",
113 |     popularity: 20
114 |   }
115 | ];
116 | 
117 | const oldHats = [
118 |   {
119 |     name: "Top Hat",
120 |     popularity: 85
121 |   },
122 |   {
123 |     name: "Beret",
124 |     popularity: 15
125 |   },
126 |   {
127 |     name: "Bowler Hat",
128 |     popularity: 9
129 |   }
130 | ];
131 | ```
132 | 
133 | In this example, we’ll be paginating 2 results at a time. Let’s create a getter to paginate our data:
134 | 
135 | ```js
136 | const getData = (data, page) => data.slice(page * 2, (page + 1) * 2);
137 | ```
138 | 
139 | > Note, in reality you probably already have a data getter with pagination support to retrieve the data via a server.
140 | 
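For illustration, such a getter might wrap a paginated API call. A minimal sketch, assuming a hypothetical `/hats` endpoint with `page` and `pageSize` query parameters, where each page comes back already sorted by `popularity`:

```js
// Hypothetical server-backed getter. Each call resolves to the array of
// hits for that page, sorted by the same key passed to combinePagination.
const getHatsFromApi = async page => {
  const response = await fetch(
    `https://api.example.com/hats?page=${page}&pageSize=2`
  );
  const { hits } = await response.json();

  return hits;
};
```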
141 | Now let's get our data. Without using `combine-pagination`, we might be tempted to just paginate each set, sort, and combine them. **This is what NOT to do:**
142 | 
143 | ```js
144 | const modernHatsPage = getData(modernHats, 0);
145 | const oldHatsPage = getData(oldHats, 0);
146 | 
147 | const hats = [...modernHatsPage, ...oldHatsPage].sort(
148 |   (a, b) => b.popularity - a.popularity
149 | );
150 | ```
151 | 
152 | This will result in a `hats` array that looks like this:
153 | 
154 | ```js
155 | [
156 |   {
157 |     name: "Baseball Cap",
158 |     popularity: 95
159 |   },
160 |   {
161 |     name: "Top Hat",
162 |     popularity: 85
163 |   },
164 |   {
165 |     name: "Beanie",
166 |     popularity: 50
167 |   },
168 |   {
169 |     name: "Beret",
170 |     popularity: 15
171 |   }
172 | ];
173 | ```
174 | 
175 | This looks fine until you query the second page, which will look like this:
176 | 
177 | ```js
178 | [
179 |   {
180 |     name: "Flat Cap",
181 |     popularity: 20
182 |   },
183 |   {
184 |     name: "Bowler Hat",
185 |     popularity: 9
186 |   }
187 | ];
188 | ```
189 | 
190 | If we combine these results, you’ll notice that now the **results are out of order**. Sure, we could re-sort our entire data set, but this has some problems:
191 | 
192 | 1. Reordering UI is confusing - if we’re rendering `hats` in a UI, such as an infinity scroll, it will cause the UI to reorder and confuse the user.
193 | 2. Inefficient sort - re-sorting the entire data set on each pagination is highly inefficient.
194 | 3. Unnecessary data requests - depending on the order of the data, getting both data sets at once might be unnecessary, especially if a network request is involved.
195 | 
196 | ## The Solution
197 | 
198 | Using a technique (currently) called [Framed Range Intersecting](#framed-range-intersecting), we can conservatively hold back trailing data from the first page that we think might overlap with subsequent pages. In the example above, it would mean holding back "Beret" until the next page is retrieved.
199 | 
200 | `combine-pagination` implements this technique. Let's try again using the above data set:
201 | 
202 | ```js
203 | import combinePagination from "combine-pagination";
204 | 
205 | const combinedGetters = combinePagination({
206 |   getters: [page => getData(modernHats, page), page => getData(oldHats, page)],
207 |   sortKey: "popularity"
208 | });
209 | ```
210 | 
211 | And query the first page:
212 | 
213 | ```js
214 | const page = await combinedGetters.getNext();
215 | ```
216 | 
217 | Resulting in:
218 | 
219 | ```js
220 | [
221 |   {
222 |     name: "Baseball Cap",
223 |     popularity: 95
224 |   },
225 |   {
226 |     name: "Top Hat",
227 |     popularity: 85
228 |   },
229 |   {
230 |     name: "Beanie",
231 |     popularity: 50
232 |   }
233 | ];
234 | ```
235 | 
236 | As expected, we only received three results. `combine-pagination` is only showing intersecting data, holding "Beret" back until it receives the next data set. Because of this, you can't define exactly how many results you want to receive. See [Fuzzy Pagination](#fuzzy-pagination).
237 | 
238 | The second time we run `getNext()`, we get the next set of data, but this time in the correct order:
239 | 
240 | ```js
241 | [
242 |   {
243 |     name: "Flat Cap",
244 |     popularity: 20
245 |   },
246 |   {
247 |     name: "Beret",
248 |     popularity: 15
249 |   },
250 |   {
251 |     name: "Bowler Hat",
252 |     popularity: 9
253 |   }
254 | ];
255 | ```
256 | 
257 | `combine-pagination` noticed that "Beret", which was held back from the first set of results, intersects "Flat Cap" and "Bowler Hat", so it has inserted it and sorted the page.
258 | 
259 | That's it. Each time you call `getNext()`, you'll retrieve the next set of sorted data until the getters are exhausted.
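In an infinity scroll you would typically call `getNext()` again whenever the user nears the bottom of the list, stopping once it returns an empty array. A minimal sketch of draining every page in order, assuming the `combinedGetters` instance from above:

```js
// getNext() keeps returning sorted pages until every getter is exhausted,
// at which point it returns an empty array.
const getAllHats = async () => {
  let allHats = [];
  let page = await combinedGetters.getNext();

  while (page.length > 0) {
    allHats = [...allHats, ...page];
    page = await combinedGetters.getNext();
  }

  return allHats;
};
```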
260 | 
261 | ## Use cases
262 | 
263 | - Using infinity scroll across multiple data sources.
264 | - Paginating across multiple Algolia queries, such as one geo location query and one not.
265 | - Paginating across different services.
266 | 
267 | ## Framed Range Intersecting
268 | 
269 | Intersecting ranges is a technique for finding values that overlap in two sets of data. For example:
270 | 
271 | - Intersection of [0, 3] & [2, 4] is [2, 3]
272 | - Intersection of [-1, 34] & [0, 4] is [0, 4]
273 | - Intersection of [0, 3] & [4, 4] is the empty set
274 | 
275 | `combine-pagination` uses a technique called Framed Range Intersecting (name is WIP), a type of intersecting that determines the leading data set and intersects the other data sets within that.
276 | 
277 | Unlike normal range intersecting:
278 | 
279 | - The first value is the first value of the leading data set.
280 | - The last value is either the last value of the leading data set, or the last value of the intersecting data set that finishes first.
281 | - Values that appear in multiple data sets are duplicated.
282 | 
283 | For example:
284 | 
285 | - Framed Intersection of [0, 3] & [2, 4] is [0, 2, 3]
286 | - Framed Intersection of [-1, 34] & [0, 4] is [-1, 0, 4]
287 | - Framed Intersection of [0, 3] & [4, 4] is [0, 3]
288 | - Framed Intersection of [0, 3] & [2, 4] & [1, 2] is [0, 1, 2, 2]
289 | 
290 | ## Fuzzy Pagination
291 | 
292 | Each time you execute `getNext()`, you can't be sure how many results you're going to receive. We call this **Fuzzy Pagination**: it returns `0 - n` results for any given page with page size `n`. This technique is best suited to infinity scroll-style use cases.
293 | 
294 | In normal pagination, you would receive `n` results for each page, only receiving `0 - n` results on the final page.
295 | 
296 | ## License
297 | 
298 | MIT © [Chris Villa](http://www.chrisvilla.co.uk)
299 | 
--------------------------------------------------------------------------------