├── .DS_Store ├── .env ├── .gitignore ├── LICENSE ├── NPM_PartialCache ├── approx_LRU.js ├── helperfunctions │ ├── checkCacheFunc.js │ ├── queryNormalizingFuncs.js │ └── setCacheFunc.js └── partialQuery.js ├── README.md ├── __tests__ ├── demoFuncTests.js └── supertest.js ├── demo ├── .DS_Store ├── client │ ├── .DS_Store │ ├── App.js │ ├── clientSideCache.js │ ├── components │ │ ├── BarChart.js │ │ ├── Footer.js │ │ ├── LLNode.js │ │ ├── LandingContent.js │ │ ├── LandingContent2.js │ │ ├── LineChart.js │ │ ├── NavBar.js │ │ ├── QueueVisualizer.js │ │ ├── RemovedLLNode.js │ │ ├── TeamMemberCard.js │ │ ├── Testimonials.js │ │ └── TextQuery.js │ ├── index.html │ ├── index.js │ ├── pages │ │ ├── About.js │ │ ├── Demo.js │ │ ├── Docs.js │ │ └── Landing.js │ ├── style.css │ └── styles │ │ ├── CachierNavBar(1).png │ │ ├── Demo.css │ │ ├── Demo.png │ │ ├── FastIcon.png │ │ ├── LLNode.css │ │ ├── QueueVisualizer.css │ │ ├── andrew.png │ │ ├── andy.png │ │ ├── arrow.png │ │ ├── cachierlogo.png │ │ ├── customizableIcon.png │ │ ├── dhruv.png │ │ ├── github.png │ │ ├── graphqlLogo.png │ │ ├── graphqlgif.gif │ │ ├── hua.png │ │ ├── installIcon.png │ │ ├── jonathan.png │ │ ├── kaju.png │ │ ├── linkedin.png │ │ ├── logo.png │ │ ├── memoryIcon.png │ │ ├── partialExampleFetch.png │ │ ├── roman.png │ │ ├── spritesheet (1).json │ │ ├── spritesheet (1).png │ │ └── stephen.png ├── server │ ├── .DS_Store │ ├── DemoFunc.js │ ├── DemoFunc.ts │ ├── cacheMoney.js │ ├── config │ │ └── db.js │ ├── models │ │ ├── Client.js │ │ └── Project.js │ ├── schema.js │ └── server.ts └── types.ts ├── dist ├── 1eb4c1e9323b71c1279e966e284e1416.gif ├── 29f842ac51c3b17ca07d25a7a09e9735.png ├── 2de6e7ef4ae9042ffd405c8dcbea9aa2.png ├── 2f4208e044c4212969a90d801ef2def8.png ├── 3734ecb3aeb09570f47139362d249be2.png ├── 5039e39a6d6ed8a3d80d43ca1d8c21ee.png ├── 56ea5f568f82e563dfd64ee6a897ef81.png ├── 6b51cea952681842ad6491b49cc56ac2.png ├── 7615be16eed41f806def1ba19d38b46d.png ├── 7664632ce349168853d72e7bb255189b.png ├── 7a6a6c54279a54d1977c9127957a503b.png ├── 852e2634ae6b8384f6fb01d089d92ff6.png ├── 8b89ce01929a419c56256c919e9a61c5.png ├── 8ee3e3d00d4f2f2b77e5cdf5c8d6fafd.png ├── 9a93576d1efc4d5a58034a34d531ec54.png ├── 9e46cba70d2c686f09a050cbce09c62b.png ├── a8a8f596e6871332d41dd5a6117a6427.png ├── a8dc608259608215f26a72fd33e96291.png ├── ac2e1e60fb0e4e4f77e02f9d9121c657.png ├── bundle.js ├── bundle.js.LICENSE.txt ├── c2206b27d7cd6ce4c5fef6b5c60389a6.png └── index.html ├── package-lock.json ├── package.json ├── postcss.config.js ├── tailwind.config.js ├── tsconfig.json └── webpack.config.js /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/Cachier/2e267d1765a9bf5f3d62e7a3c258ea0d3159ff9a/.DS_Store -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | NODE_ENV='production' 2 | PORT = 3000 3 | MONGO_URI = 'mongodb+srv://kajusarkar1:hushmail@cluster0.5n57uxr.mongodb.net/mgmt_db?retryWrites=true&w=majority' 4 | 5 | MONGO_USERNAME = 'kajusarkar@gmail.com' 6 | MONGO_PASSWORD = 'S7khuPkva2gaSESw' 7 | 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 OSLabs Beta 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /NPM_PartialCache/approx_LRU.js: -------------------------------------------------------------------------------- 1 | function approxLRU(cache, sampleSize) { 2 | const allKeys = Object.keys(cache); 3 | const samplePool = []; 4 | 5 | if (allKeys.length < sampleSize) { 6 | sampleSize = allKeys.length; 7 | } 8 | 9 | for (let i = 0; i < sampleSize; i++) { 10 | const randomNum = Math.floor(Math.random() * allKeys.length); 11 | const randomKey = allKeys[randomNum]; 12 | samplePool.push(randomKey); 13 | } 14 | let leastRecentlyUsedKey; 15 | let currLeastRecentDate = Infinity; 16 | for (let i = 0; i < samplePool.length; i++) { 17 | const currKey = samplePool[i]; 18 | if ( 19 | Array.isArray(cache[currKey]) && 20 | cache[currKey][cache[currKey].length - 1] < currLeastRecentDate 21 | ) { 22 | currLeastRecentDate = cache[currKey][cache[currKey].length - 1]; 23 | leastRecentlyUsedKey = currKey; 24 | } else if (cache[currKey]['__CachierCacheDate'] < currLeastRecentDate) { 25 | currLeastRecentDate = cache[currKey].__CachierCacheDate; 26 | leastRecentlyUsedKey = currKey; 27 | } 28 | } 29 | delete cache[leastRecentlyUsedKey]; 30 | 31 | return; 32 | } 33 | 34 | module.exports = approxLRU; 35 | -------------------------------------------------------------------------------- /NPM_PartialCache/helperfunctions/checkCacheFunc.js: -------------------------------------------------------------------------------- 1 | function checkCache(normalizedQuery, cache) { 2 | const { typesArr, cacheKeysArr, fieldsArr } = normalizedQuery; 3 | const resultData = { data: {} }; 4 | const currData = resultData.data; 5 | for (let i = 0; i < fieldsArr.length; i++) { 6 | let currCacheKey = cacheKeysArr[i]; 7 | iterateCache(fieldsArr[i], currCacheKey, currData, cache); 8 | depthCount = 0; 9 | } 10 | if (checkMissingData) { 11 | depthCount = 0; 12 | return resultData; 13 | } else { 14 | depthCount = 0; 15 | checkMissingData = true; 16 | return false; 17 | } 18 | } 19 | 20 | let depthCount = 0; 21 | let checkMissingData = true; 22 | 23 | function iterateCache(fieldArr, currCacheKey, currReturnData, currCache) { 24 | const currDepthObj = {}; 25 | let currCacheObj = currCache[currCacheKey]; 26 | depthCount++; 27 | 
currReturnData[currCacheKey] = currDepthObj; 28 | 29 | for (let j = 0; j < fieldArr.length; j++) { 30 | if (currCacheObj === undefined) { 31 | checkMissingData = false; 32 | return; 33 | } else { 34 | if (currCacheObj['__CachierCacheDate'] !== undefined) { 35 | currCacheObj['__CachierCacheDate'] = performance.now(); 36 | } 37 | if (Array.isArray(currCacheObj)) { 38 | const tempArr = []; 39 | let index = 0; 40 | 41 | for (let i = 0; i < currCacheObj.length - 1; i++) { 42 | tempArr.push( 43 | iterateCache( 44 | fieldArr, 45 | currCacheObj[index], 46 | currReturnData[currCacheKey], 47 | currCache 48 | ) 49 | ); 50 | if (tempArr[index] === false) { 51 | checkMissingData = false; 52 | return; 53 | } 54 | index++; 55 | } 56 | if (depthCount <= 1) { 57 | currCacheObj[currCacheObj.length - 1] = performance.now(); 58 | } 59 | currReturnData[currCacheKey] = tempArr; 60 | } else if (typeof fieldArr[j] === 'string') { 61 | if (currCacheObj[fieldArr[j]] === undefined) { 62 | checkMissingData = false; 63 | return; 64 | } 65 | currDepthObj[fieldArr[j]] = currCacheObj[fieldArr[j]]; 66 | 67 | if (currCacheObj.__CachierCacheDate !== undefined) { 68 | currCacheObj['__CachierCacheDate'] = performance.now(); 69 | } 70 | } else { 71 | const currNestedFieldName = Object.keys(fieldArr[j])[0]; 72 | const innerField = fieldArr[j][currNestedFieldName]; 73 | if (currCacheObj[currNestedFieldName] === null) { 74 | currDepthObj[currNestedFieldName] = null; 75 | } else { 76 | const result = iterateCache( 77 | innerField, 78 | currNestedFieldName, 79 | currDepthObj, 80 | currCacheObj 81 | ); 82 | if (result === false) { 83 | checkMissingData = false; 84 | return; 85 | } 86 | } 87 | } 88 | } 89 | } 90 | depthCount--; 91 | return currDepthObj; 92 | } 93 | 94 | module.exports = checkCache; 95 | -------------------------------------------------------------------------------- /NPM_PartialCache/helperfunctions/queryNormalizingFuncs.js: -------------------------------------------------------------------------------- 1 | function queryNormalizer(query, addType = true) { 2 | if (addType) { 3 | query = addTypenameField(query); 4 | } 5 | const outerQueryArr = seperateOuterQueries(query); 6 | const normalizedQueryObj = {}; 7 | normalizedQueryObj.typesArr = outerQueryArr.map((query) => checkType(query)); 8 | normalizedQueryObj.cacheKeysArr = outerQueryArr.map((query) => 9 | createKeyForVariableQuery(query) 10 | ); 11 | normalizedQueryObj.fieldsArr = outerQueryArr.map((query) => 12 | seperateInnerQueries(query) 13 | ); 14 | 15 | return normalizedQueryObj; 16 | } 17 | 18 | //Adds '__typename' field to every sub query. Helper function to ensure each subquery returns its typename which is crucial for creating unique keys in the cache. 
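// Example (illustrative input, not taken from the test suite):
//   addTypenameField('{ dragons { id } }')  returns  '{ dragons { __typename id } }'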
19 | function addTypenameField(query) { 20 | let newQuery = ''; 21 | let bracketCount = 2; 22 | let index = 0; 23 | while (bracketCount > 0) { 24 | if (query[index] === '{') bracketCount--; 25 | newQuery += query[index++]; 26 | } 27 | 28 | newQuery += ' __typename'; 29 | 30 | while (query[index] !== undefined) { 31 | newQuery += query[index]; 32 | if (query[index] === '{') newQuery += ' __typename'; 33 | index++; 34 | } 35 | return newQuery; 36 | } 37 | 38 | function seperateOuterQueries(query) { 39 | const outerQueryArr = []; 40 | let index = 0; 41 | let mainbracketCount = 1; 42 | while (query[index] !== '{') { 43 | index++; 44 | } 45 | index++; 46 | while (mainbracketCount > 0) { 47 | let currQueryStr = '{'; 48 | let currInnerBracketCount = 1; 49 | while (query[index] !== '{') { 50 | if (query[index] === '}') { 51 | return outerQueryArr; 52 | } 53 | currQueryStr += query[index]; 54 | index++; 55 | } 56 | currQueryStr += query[index]; 57 | index++; 58 | mainbracketCount++; 59 | while (currInnerBracketCount > 0) { 60 | if (query[index] === '{') { 61 | currInnerBracketCount++; 62 | mainbracketCount++; 63 | } 64 | if (query[index] === '}') { 65 | currInnerBracketCount--; 66 | mainbracketCount--; 67 | } 68 | currQueryStr += query[index]; 69 | index++; 70 | } 71 | outerQueryArr.push((currQueryStr += '}')); 72 | } 73 | return outerQueryArr; 74 | } 75 | 76 | function checkVariable(query) { 77 | const variables = {}; 78 | let key = ''; 79 | let value = ''; 80 | let index = 0; 81 | 82 | while (query[index] !== '(') { 83 | if (query[index] === '}') return false; 84 | index++; 85 | } 86 | while (query[++index] !== ':') { 87 | key += query[index]; 88 | } 89 | while (query[++index] !== ')') { 90 | if (query[index] !== '"' && query[index] !== "'" && query[index] !== ' ') { 91 | value += query[index]; 92 | } 93 | } 94 | key = key.toLowerCase().trim(); 95 | variables[key] = value.trim(); 96 | 97 | return variables; 98 | } 99 | 100 | function checkType(query) { 101 | let index = 0; 102 | let str = ''; 103 | while (query[index] !== '{') { 104 | index++; 105 | } 106 | index++; 107 | while (query[index] !== '(' && query[index] !== '{') { 108 | if (query[index] !== ' ' && query.charCodeAt(index) !== 10) { 109 | str += query[index]; 110 | } 111 | index++; 112 | } 113 | 114 | return str; 115 | } 116 | 117 | function seperateInnerQueries(query) { 118 | const resultArr = []; 119 | let bracketCount = 2; 120 | let index = 0; 121 | let obj = {}; 122 | let tempStr = ''; 123 | 124 | while (bracketCount > 0) { 125 | if (query[index++] === '{') bracketCount--; 126 | } 127 | const helper = () => { 128 | while (bracketCount >= 0 && index < query.length) { 129 | if (query[index] === '}') { 130 | bracketCount--; 131 | index++; 132 | } 133 | 134 | while (query[index] === ' ' || query.charCodeAt(index) === 10) { 135 | ++index; 136 | } 137 | while ( 138 | query[index] !== ' ' && 139 | query.charCodeAt(index) !== 10 && 140 | index < query.length && 141 | query[index] !== '}' && 142 | query[index] !== '{' 143 | ) { 144 | tempStr += query[index]; 145 | ++index; 146 | } 147 | if (query[index] === '{') { 148 | bracketCount++; 149 | index++; 150 | const key = resultArr[resultArr.length - 1]; 151 | resultArr[resultArr.length - 1] = {}; 152 | resultArr[resultArr.length - 1][key] = []; 153 | let currArray = resultArr[resultArr.length - 1][key]; 154 | 155 | while (bracketCount >= 1) { 156 | let nestStr = ''; 157 | if (query[index] === '}') { 158 | bracketCount--; 159 | } 160 | while ( 161 | query[index] !== ' ' && 162 | 
query.charCodeAt(index) !== 10 && 163 | index < query.length && 164 | query[index] !== '}' && 165 | query[index] !== '{' 166 | ) { 167 | nestStr += query[index]; 168 | ++index; 169 | } 170 | 171 | if (query[index + 1] === '{') { 172 | bracketCount++; 173 | 174 | currArray.push({}); 175 | currArray[currArray.length - 1][nestStr] = []; 176 | currArray = currArray[currArray.length - 1][nestStr]; 177 | nestStr = ''; 178 | } else if (nestStr !== '') { 179 | currArray.push(nestStr); 180 | } 181 | index++; 182 | } 183 | // } 184 | } 185 | if (tempStr !== '') { 186 | resultArr.push(tempStr); 187 | tempStr = ''; 188 | } 189 | } 190 | 191 | index++; 192 | }; 193 | helper(); 194 | return resultArr; 195 | } 196 | 197 | function createKeyForVariableQuery(query) { 198 | const variable = Object.keys(checkVariable(query))[0]; 199 | const id = checkVariable(query)[variable]; 200 | const type = checkType(query); 201 | if (!id) return type; 202 | return `${type}:${id}`; 203 | } 204 | 205 | module.exports = { queryNormalizer, addTypenameField }; 206 | -------------------------------------------------------------------------------- /NPM_PartialCache/helperfunctions/setCacheFunc.js: -------------------------------------------------------------------------------- 1 | let depthCount = 0; 2 | 3 | function cacheNewData(normalizedQuery, data, cache, uniques) { 4 | const { cacheKeysArr, fieldsArr } = normalizedQuery; 5 | const currData = data.data; 6 | for (let i = 0; i < fieldsArr.length; i++) { 7 | let currCacheKey = cacheKeysArr[i]; 8 | iterateFieldsArr( 9 | fieldsArr[i], 10 | currCacheKey, 11 | currData, 12 | currCacheKey, 13 | cache, 14 | uniques 15 | ); 16 | depthCount = 0; 17 | } 18 | return; 19 | } 20 | 21 | function iterateFieldsArr( 22 | fieldArr, 23 | currCacheKey, 24 | data, 25 | dataKey, 26 | currCache, 27 | uniques 28 | ) { 29 | ++depthCount; 30 | let currDepthObj = {}; 31 | currCache[currCacheKey] = currDepthObj; 32 | 33 | if (Array.isArray(data[dataKey])) { 34 | const tempArr = []; 35 | let index = 0; 36 | 37 | data[dataKey].forEach((obj) => { 38 | const unique = uniques[currCacheKey] || 'id'; 39 | const objTypeName = `${obj.__typename}`; 40 | const uniqueKey = `${objTypeName.toLowerCase()}:${obj[unique]}`; 41 | 42 | tempArr.push(uniqueKey); 43 | 44 | currCache[uniqueKey] = iterateFieldsArr( 45 | fieldArr, 46 | index, 47 | data[dataKey], 48 | index, 49 | currCache[currCacheKey], 50 | uniques 51 | ); 52 | if (depthCount <= 1) { 53 | currCache[uniqueKey]['__CachierCacheDate'] = performance.now(); 54 | } 55 | index++; 56 | }); 57 | if (depthCount <= 1) { 58 | tempArr.push(performance.now()); 59 | } 60 | currCache[currCacheKey] = tempArr; 61 | } else { 62 | for (let j = 0; j < fieldArr.length; j++) { 63 | if (typeof fieldArr[j] === 'string') { 64 | currDepthObj[fieldArr[j]] = data[dataKey][fieldArr[j]]; 65 | if (depthCount <= 1) { 66 | currCache[currCacheKey]['__CachierCacheDate'] = performance.now(); 67 | } 68 | } else { 69 | if (depthCount <= 1) { 70 | currCache[currCacheKey]['__CachierCacheDate'] = performance.now(); 71 | } 72 | const currNestedFieldName = Object.keys(fieldArr[j])[0]; 73 | if (data[dataKey][currNestedFieldName] === null) { 74 | currCache[dataKey][currNestedFieldName] = null; 75 | } else { 76 | const innerField = fieldArr[j][currNestedFieldName]; 77 | iterateFieldsArr( 78 | innerField, 79 | currNestedFieldName, 80 | data[dataKey], 81 | currNestedFieldName, 82 | currCache[currCacheKey], 83 | uniques 84 | ); 85 | } 86 | } 87 | } 88 | } 89 | 90 | depthCount--; 91 | return currDepthObj; 92 
| } 93 | 94 | module.exports = cacheNewData; 95 | -------------------------------------------------------------------------------- /NPM_PartialCache/partialQuery.js: -------------------------------------------------------------------------------- 1 | const fetch = (...args) => 2 | import('node-fetch').then(({ default: fetch }) => fetch(...args)); 3 | 4 | const checkCache = require('./helperfunctions/checkCacheFunc.js'); 5 | const { 6 | queryNormalizer, 7 | addTypenameField, 8 | } = require('./helperfunctions/queryNormalizingFuncs.js'); 9 | const cacheNewData = require('./helperfunctions/setCacheFunc.js'); 10 | const evictionPolicy = require('./approx_LRU'); 11 | 12 | function partialQueryCache( 13 | endpoint, 14 | capacity = 100, 15 | sampleSize = 5, 16 | evictionSize = 5 17 | ) { 18 | const cache = {}; 19 | return async function helper(req, res, next) { 20 | const { query, uniques } = req.body; 21 | const dataFromCache = checkCache(queryNormalizer(query, false), cache); 22 | if (dataFromCache !== false) { 23 | return res.json(dataFromCache); 24 | } else { 25 | const queryWithTypename = addTypenameField(query); 26 | fetch(endpoint, { 27 | method: 'POST', 28 | headers: { 'Content-type': 'application/json' }, 29 | body: JSON.stringify({ 30 | query: queryWithTypename, 31 | }), 32 | }) 33 | .then((response) => response.json()) 34 | .then((data) => { 35 | res.json(data); 36 | cacheNewData(queryNormalizer(query), data, cache, uniques); 37 | 38 | while (Object.keys(cache).length > capacity * 100) { 39 | for (let i = 0; i < evictionSize; i++) { 40 | evictionPolicy(cache, sampleSize); 41 | } 42 | } 43 | return; 44 | }); 45 | } 46 | }; 47 | } 48 | 49 | module.exports = partialQueryCache; 50 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | [![Contributors][contributors-shield]][contributors-url] 4 | [![Forks][forks-shield]][forks-url] 5 | [![Stargazers][stars-shield]][stars-url] 6 | [![Issues][issues-shield]][issues-url] 7 | [![MIT License][license-shield]][license-url] 8 | [![LinkedIn][linkedin-shield]][linkedin-url] 9 | 10 | 11 | 12 |
13 |
14 | 15 | Cachier Logo 16 | 17 | 18 |

Cachier

19 | 20 |

21 | GraphQL caching tool with custom eviction policies, cache normalization. 22 |
23 | Explore the docs » 24 |
25 |
26 | View Demo 27 | · 28 | Report Bug 29 | · 30 | Request Feature 31 |

32 |
33 | 34 | 35 |
36 | Table of Contents 37 |
    38 |
  1. 39 | About The Project 40 | 43 |
  2. 44 |
  3. 45 | Getting Started 46 | 50 |
  4. 51 |
  5. Usage
  6. 52 |
  7. Roadmap
  8. 53 |
  9. Contributing
  10. 54 |
  11. License
  12. 55 |
  13. Contact
  14. 56 |
  15. Acknowledgments
  16. 57 |
58 |
59 | 60 | 61 | ## About The Project 62 | 63 | ### Built With 64 | 65 | [![React][React.js]][React-url] 66 | [![Redis][Redis.io]][Redis-url] 67 | [![GraphQL][GraphQL.io]][GraphQL-url] 68 | [![Node/Express][Express.io]][Express-url] 69 | [![TailwindCSS][TailwindCSS.io]][Tailwind-url] 70 | 71 |

(back to top)

Welcome to Cachier, a lightweight caching tool built specifically for GraphQL to reduce load times and minimize redundant data fetching.

GraphQL gets no native HTTP caching because it relies solely on the POST method, which creates a risk of over-fetching: queries are re-run unnecessarily and load times suffer. Our team of engineers developed a compact, easy-to-use solution that lets users cache their queries on both the server side and the client side!

#### Cachier currently offers:
- Storage inside session storage for client-side caching
- Ability to choose between Redis and a native in-memory cache
- Unique key generation for response data, so the developer does not have to tag data for the cache
- Partial and exact matching for query fields in the developer's GraphQL API
- Highly configurable eviction policies


#### We created a highly performant and customizable GraphQL caching library that consists of three main caching functions:
- Cachier Normalized Server-side Cache
- Cachier Direct Server-side Cache
- Cachier Direct Client-side Cache

We will go over each solution in detail below.


## Getting Started
## Cachier Normalized Server-side Cache

Cachier's Normalized Server-side Cache breaks up GraphQL queries into individual sub-queries to be stored in the cache. This provides maximum cache efficiency by organizing data in a way that prevents data redundancy and allows partial retrieval of subset data, drastically reducing network requests to the database.

## Installation and Import
If this is your first time using Cachier's Normalized Cache, run the following command in your terminal.
~~~
npm install @cachier/cache-partials
~~~

In your server file, require our middleware to handle GraphQL requests using the CommonJS format:
~~~
const Cachier = require('@cachier/cache-partials');
~~~

## Set up your Cachier middleware function
#### endpoint
- The endpoint that the client will make GraphQL queries to if it wants to utilize the cache.
#### graphQLEndpoint
- The graphQLEndpoint parameter is where you specify your GraphQL API's endpoint. This allows Cachier to route any query that cannot be resolved by the Cachier cache to your GraphQL API.
#### cacheCapacity
- The cacheCapacity parameter lets you specify a maximum cache length, which tells Cachier when to evict from the cache. All inputs for capacity will be multiples of 100. The default capacity is 100 (1000 keys in the cache).
#### sampleSize
- The sampleSize parameter lets the developer configure the number of random keys that will be considered for eviction. The default sampleSize is 5, which we recommend for most applications.
#### evictionSize
- The evictionSize parameter lets the developer configure the number of evictions that will be made when your cache capacity is reached. The default evictionSize is 5.

~~~
app.use(
  endpoint,
  Cachier(graphQLEndpoint, cacheCapacity, sampleSize, evictionSize)
);
~~~

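For reference, here is a minimal sketch of how the middleware might sit inside an Express server. The cache path, GraphQL endpoint, and port below are illustrative placeholders, not values required by Cachier.

~~~
// Minimal sketch: wiring the normalized cache middleware into an Express app.
// '/partialCache', the SpaceX endpoint, and port 3000 are examples only.
const express = require('express');
const Cachier = require('@cachier/cache-partials');

const app = express();
app.use(express.json()); // a JSON body parser must run first: Cachier reads query and uniques from req.body

// Queries the cache cannot resolve are forwarded to the GraphQL API passed as the first argument.
app.use('/partialCache', Cachier('https://api.spacex.land/graphql', 100, 5, 5));

app.listen(3000, () => console.log('Cachier demo server listening on port 3000'));
~~~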

(back to top)

## Usage

~~~
app.use( '/Cachier', Cachier('https://api.spacex.land/graphql', 100, 5, 5) );
~~~

To fetch from Cachier's normalized cache, fetch as you would from your GraphQL API, except you will need to set the uniques option in the request body. The uniques object must contain a unique identifier for every list item in your query: include the list name as the key and the unique identifier as the value. The unique identifier is any queried piece of data that is unique to each list item!

~~~
fetch('/graphql', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Accept: 'application/json',
  },
  body: JSON.stringify({
    query: queryGraphQLString,
    uniques: { listKey: uniqueIdentifier },
  })
});
~~~


### How it works


Example fetch to the SpaceX GraphQL API:

~~~
fetch('http://localhost:3000/partialCache', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Accept: 'application/json',
  },
  body: JSON.stringify({
    query: `{
      dragons {
        id
        return_payload_mass {
          kg
        }
      }
    }`,
    uniques: { dragons: 'id' },
  }),
})
~~~


The client will fetch to the Cachier cache endpoint with an object containing the query string and the unique types. The unique types need to contain a unique identifier for all array/list items so that Cachier can generate a unique cache key.

~~~
{
  "typesArr": [
    "dragons"
  ],
  "fieldsArr": [
    [
      "__typename",
      "id",
      {
        "return_payload_mass": [
          "__typename",
          "kg"
        ]
      }
    ]
  ]
}
~~~


Cachier parses incoming GraphQL queries and separates them into subqueries stored in a "Cachier" object. The queries are broken up into two arrays, typesArr and fieldsArr, whose indexes correspond to one another. fieldsArr is an array of arrays containing the fields for each cache key; if a field is nested, it is stored as a nested object. Cachier then waits for the returned data and uses this "Cachier" query object to sort the data into the cache.


Here is the data returned from our example query:
~~~
{
  "data": {
    "dragons": [
      {
        "id": "dragon2",
        "return_payload_mass": {
          "kg": 3000
        }
      },
      {
        "id": "dragon1",
        "return_payload_mass": {
          "kg": 3000
        }
      }
    ]
  }
}
~~~

After receiving the data back, Cachier uses the query map stored in the "Cachier" object to normalize and store the data as individual keys inside the cache.
This is how the data will look once normalized and stored in the cache:
~~~
{
  "dragons": [
    "dragon:dragon2",
    "dragon:dragon1",
    8492.694458007812
  ],
  "dragon:dragon2": {
    "__typename": "Dragon",
    "id": "dragon2",
    "return_payload_mass": {
      "__typename": "Mass",
      "kg": 3000
    },
    "__CachierCacheDate": 8492.681999921799
  },
  "dragon:dragon1": {
    "__typename": "Dragon",
    "id": "dragon1",
    "return_payload_mass": {
      "__typename": "Mass",
      "kg": 3000
    },
    "__CachierCacheDate": 8492.691667079926
  }
}
~~~
As you can see, the dragons array now stores only references to keys in the cache, and the data from the array is stored as separate, unique keys in the cache. This normalized cache structure eliminates data redundancy and allows for partial retrieval of subset data. (The "__CachierCacheDate" fields and the number at the last index of the array keep track of recency for our eviction policy, which we cover next.)


### Approximated LRU Eviction

Cachier's Normalized Cache uses a custom Approximated LRU eviction policy. This is not a true LRU implementation, but it comes very close in terms of performance. Cachier does not use a true LRU implementation because one costs more memory. Cachier's LRU policy works by creating a sample (the sample size can be configured by the developer) of randomly selected keys from the cache and evicting the least recently used key from the sample.
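For readers who want to see the mechanics, here is a rough sketch of that sampling step. It mirrors the `approx_LRU.js` helper included in this package, trimmed for readability, and is meant as an illustration rather than the exact published implementation.

~~~
// Sketch of approximated LRU eviction: sample a handful of random keys,
// then evict the one with the oldest recorded access time.
function approxLRUEvict(cache, sampleSize) {
  const keys = Object.keys(cache);
  const pool = [];
  for (let i = 0; i < Math.min(sampleSize, keys.length); i++) {
    pool.push(keys[Math.floor(Math.random() * keys.length)]);
  }

  let oldestKey;
  let oldestTime = Infinity;
  for (const key of pool) {
    const entry = cache[key];
    // List entries keep their timestamp at the last index; object entries keep it
    // on the __CachierCacheDate field.
    const lastUsed = Array.isArray(entry)
      ? entry[entry.length - 1]
      : entry['__CachierCacheDate'];
    if (lastUsed < oldestTime) {
      oldestTime = lastUsed;
      oldestKey = key;
    }
  }

  if (oldestKey !== undefined) delete cache[oldestKey];
}
~~~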

(back to top)

## Cachier Direct Server-side Cache
Cachier's Direct Server-side Cache uses a custom LRU-SLFR (Least Recently Used, Smallest Latency First Replacement) policy. LRU-SLFR is very similar to LRU except that it takes latency into account as well as recency when evicting. Cachier's LRU-SLFR eviction policy utilizes a linked hash map to achieve true LRU and allows O(1) deletion, lookup, and insertion. Latency is factored in by forming a group of the least recently used queries and removing the query with the lowest latency first; the whole group is evicted before moving on to the next group. This allows for much smarter evictions compared to traditional LRU. Check out the demo page for a visualization of the eviction policy.

### How to install and import
If this is your first time using Cachier's Direct Server-side Cache, run the following command in your terminal.

~~~
npm install @cachier/server-side
~~~

In your server file, require our middleware to handle GraphQL requests using the CommonJS format:

~~~
const Cachier = require('@cachier/server-side')
~~~

### Set up your Cachier middleware function

#### endpoint
- The endpoint that the client will make GraphQL queries to if it wants to utilize the cache.
#### graphQLEndpoint
- The graphQLEndpoint parameter is where you specify your GraphQL API's endpoint. This allows Cachier to route any query that cannot be resolved by the Cachier cache to your GraphQL API.
#### capacity
- The capacity parameter allows you to specify a maximum cache length, which tells Cachier when to evict from the cache.
#### groupSize
- The groupSize parameter allows the developer to configure the number of least recently used keys that will be considered for eviction. The key with the lowest latency in the group will be evicted first. The whole group will be evicted before moving on to the next group.
#### redisClient
- If you would like to use Redis to store your cache, pass in your connected Redis client as an argument. If you leave out this parameter, Cachier will default to its native built-in cache.


~~~
app.use(
  endpoint,
  Cachier(graphqlEndpoint, capacity, groupSize, redisClient) // redisClient is optional
);
~~~

Example implementation without Redis:

~~~
app.use( '/Cachier', Cachier('https://api.spacex.land/graphql', 100, 5) );
~~~


### If using Redis
First, install the Redis package for Node.js:

`npm install redis`

Then install the Redis server itself:
1. Install Redis
   - MacOS users: [Redis installation for MacOS](https://redis.io/docs/getting-started/installation/install-redis-on-mac-os/)
   - Linux users: [Redis installation for Linux](https://redis.io/docs/getting-started/installation/install-redis-on-linux/)
   - Windows users:
     1. Redis is not officially supported on Windows, so you must have a [Windows Subsystem for Linux](https://learn.microsoft.com/en-us/windows/wsl/install).
     2. Once you have WSL, follow [Redis installation for Windows](https://redis.io/docs/getting-started/installation/install-redis-on-windows/).


## Cachier Direct Client-side Cache

Cachier's Direct Client-side Cache uses the same underlying mechanisms as Cachier's Direct Server-side Cache, except it stores the cache in the client browser's session storage. This allows for even faster cached query times than a server-side implementation. Cachier's client-side cache was built to mimic a traditional fetch request, so it is very easy to integrate into new and existing codebases.

### Installation and Import

If this is your first time using Cachier's Direct Client-side Cache, run the following command in your terminal.

~~~
npm install @cachier/client-side
~~~

In your client file, import the Cachier client-side function:

~~~
import clientSideCache from '@cachier/client-side';
~~~

### Initialize your Cachier client-side cache

#### capacity
- The capacity parameter allows you to specify a maximum cache length, which tells Cachier when to evict from the cache.
#### groupSize
- The groupSize parameter allows the developer to configure the number of least recently used keys that will be considered for eviction. The key with the lowest latency in the group will be evicted first. The whole group will be evicted before moving on to the next group.

~~~
const cachierFetch = clientSideCache(500, 5);
~~~

Operates exactly like fetch():

~~~
cachierFetch('/graphql', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Accept: 'application/json',
  },
  body: JSON.stringify({
    query: queryGraphQLString,
  })
});
~~~


## Roadmap

- [ ] Mutation handling
- [ ] Full partial querying
- [ ] Demo with more options
- [ ] Faster text editor

See the [open issues](https://github.com/oslabs-beta/Cachier/issues) for a full list of proposed features (and known issues).

(back to top)

379 | 380 | 381 | ## Contributing 382 | 383 | Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**. 384 | 385 | If you have a suggestion that would make this better, please fork the repo and create a pull request. You can also simply open an issue with the tag "enhancement". 386 | Don't forget to give the project a star! Thanks again! 387 | 388 | 1. Fork the Project 389 | 2. Create your Feature Branch (`git checkout -b feature/AmazingFeature`) 390 | 3. Commit your Changes (`git commit -m 'Add some AmazingFeature'`) 391 | 4. Push to the Branch (`git push origin feature/AmazingFeature`) 392 | 5. Open a Pull Request 393 | 394 |

(back to top)

395 | 396 | ## Tech stack used 397 | Node - Express - React - Tailwind CSS - ChartJS - Redis - GraphQL - TypeScript - Jest - Supertest - Webpack 398 | 399 | ## Here's how to contribute to our open source library 400 | Our vision for our open-source project is for fellow developers to be able to interate on and improve this tool. This is exactly where you and the community comes in. So, if you have an idea that can make Cachier better, you can make that idea come to life by following the steps below: 401 | 402 | 1. Fork Cachier 403 | 2. Pull down our dev branch with command ```git pull origin dev``` 404 | 3. Create your own Feature Branch with the command ```git checkout -b ``` 405 | 4. Add your changes with the command ```git add .``` 406 | 5. Stage and commit your changes with the command ```git commit -m ""``` 407 | 6. Merge your branch with the dev branch locally with the command ```git merge dev``` 408 | 7. Resolve any merge conflicts 409 | 8. Push up your branch with the command ```git push origin ``` 410 | 9. Open a pull request 411 | 412 | Please star our repo if you've found this useful, we want to be able to help as many of developers as we can! 413 | 414 | ## Contributors 415 | * Andy Zheng || [Github](https://github.com/andy5313) || [Linkedin](https://www.linkedin.com/in/andyzheng5313/) 416 | * Dhruv Thota || [Github](https://github.com/L05Dhruv) || [Linkedin](https://www.linkedin.com/in/dhruv-thota/) 417 | * Jonathan Chen || [Github](https://github.com/jchen0903i) || [Linkedin](https://www.linkedin.com/in/jonathan-chen3/) 418 | * Kaju Sarkar || [Github](https://github.com/kajusarkar) || [Linkedin](https://www.linkedin.com/in/kaju-sarkar-a6329862/) 419 | * Roman Darker || [Github](https://github.com/romanjamesd) || [Linkedin](https://www.linkedin.com/in/roman-darker-707147175/) 420 | 421 | ## Works cited 422 | - LRU based small latency first replacement (SLFR) algorithm for the proxy cache. (2003). Proceedings IEEE/WIC International Conference on Web Intelligence (WI 2003). https://doi.org/10.1109/wi.2003.1241250 423 | - Wang, Y., Yang, J., & Wang, Z. (2020). Dynamically Configuring LRU Replacement Policy in Redis. The International Symposium on Memory Systems. https://doi.org/10.1145/3422575.3422799 424 | - Morales, K., & Lee, B. K. (2012). Fixed Segmented LRU cache replacement scheme with selective caching. 2012 IEEE 31st International Performance Computing and Communications Conference (IPCCC). https://doi.org/10.1109/pccc.2012.6407712 425 | 426 | 427 | ## License 428 | Distributed under the MIT license. 
429 | 430 | 431 | 432 | [contributors-shield]: https://img.shields.io/github/contributors/oslabs-beta/Cachier.svg?style=for-the-badge 433 | [contributors-url]: https://github.com/oslabs-beta/Cachier/graphs/contributors 434 | [forks-shield]: https://img.shields.io/github/forks/oslabs-beta/Cachier.svg?style=for-the-badge 435 | [forks-url]: https://github.com/oslabs-beta/Cachier/network/members 436 | [stars-shield]: https://img.shields.io/github/stars/oslabs-beta/Cachier.svg?style=for-the-badge 437 | [stars-url]: https://github.com/oslabs-beta/Cachier/stargazers 438 | [issues-shield]: https://img.shields.io/github/issues/oslabs-beta/Cachier.svg?style=for-the-badge 439 | [issues-url]: https://github.com/oslabs-beta/Cachier/issues 440 | [license-shield]: https://img.shields.io/github/license/oslabs-beta/Cachier.svg?style=for-the-badge 441 | [license-url]: https://github.com/oslabs-beta/Cachier/blob/master/LICENSE.txt 442 | [linkedin-shield]: https://img.shields.io/badge/-LinkedIn-black.svg?style=for-the-badge&logo=linkedin&colorB=555 443 | [linkedin-url]: https://linkedin.com/company/cachier 444 | [product-screenshot]: images/screenshot.png 445 | [React.js]: https://img.shields.io/badge/React-20232A?style=for-the-badge&logo=react&logoColor=61DAFB 446 | [React-url]: https://reactjs.org/ 447 | [Redis.io]: https://img.shields.io/badge/Redis-Data%20store-red 448 | [Redis-url]: https://redis.io/ 449 | [GraphQL.io]: https://img.shields.io/badge/%20-GraphQL-brightgreen 450 | [GraphQL-url]: https://graphql.org/learn/ 451 | [Express.io]: https://img.shields.io/badge/Node-Express-orange 452 | [Express-url]: https://expressjs.com/ 453 | [TailwindCSS.io]: https://img.shields.io/badge/%20UI-TailwindCSS%20-blue 454 | [Tailwind-url]: https://v2.tailwindcss.com/docs 455 | [Vue.js]: https://img.shields.io/badge/Vue.js-35495E?style=for-the-badge&logo=vuedotjs&logoColor=4FC08D 456 | [Vue-url]: https://vuejs.org/ 457 | [Angular.io]: https://img.shields.io/badge/Angular-DD0031?style=for-the-badge&logo=angular&logoColor=white 458 | [Angular-url]: https://angular.io/ 459 | 460 | -------------------------------------------------------------------------------- /__tests__/demoFuncTests.js: -------------------------------------------------------------------------------- 1 | this.capacity = capacity; 2 | this.groupSize = groupSize; 3 | this.redisClient = redisClient; 4 | this.endpoint = endpoint; 5 | 6 | // tests for CacheMoney function 7 | test("CacheMoney function should throw an error if capacity or groupSize is less than 1 or groupSize exceeds capacity", () => { 8 | expect(() => new CacheMoney(endpoint, 0, 1, redisClient)).toThrowError( 9 | "Capacity and groupSize needs to be a number greater than 1 and groupsize cannot exceed capacity" 10 | ); 11 | }); 12 | 13 | test("CacheMoney should create a new EvictionQueue", () => { 14 | const cacheMoney = new CacheMoney(endpoint, capacity, groupSize, redisClient); 15 | expect(cacheMoney.queue).toBeInstanceOf(EvictionQueue); 16 | }); 17 | 18 | test("CacheMoney should assign capacity, groupSize, redisClient and endpoint correctly", () => { 19 | const cacheMoney = new CacheMoney(endpoint, capacity, groupSize, redisClient); 20 | expect(cacheMoney.capacity).toBe(capacity); 21 | expect(cacheMoney.groupSize).toBe(groupSize); 22 | expect(cacheMoney.redisClient).toBe(redisClient); 23 | expect(cacheMoney.endpoint).toBe(endpoint); 24 | }); 25 | 26 | test("CacheMoney should have traverse function", () => { 27 | const cacheMoney = new CacheMoney(endpoint, capacity, groupSize, redisClient); 
28 | expect(typeof cacheMoney.traverse).toBe('function'); 29 | }); 30 | 31 | describe('checkCache', () => { 32 | beforeEach(() => { 33 | const req = { 34 | body: { 35 | query: 'some query', 36 | variables: { some: 'variables' }, 37 | } 38 | }; 39 | res = jest.fn(); 40 | next = jest.fn(); 41 | }); 42 | 43 | test('should set valueFromCache if redisClient is used', async () => { 44 | const redisClient = { 45 | get: jest.fn().mockReturnValue('someValue') 46 | } 47 | await checkCache(req, res, next); 48 | expect(redisClient.get).toHaveBeenCalled(); 49 | expect(valueFromCache).toEqual('someValue'); 50 | }); 51 | 52 | test('should set valueFromCache if cacheMoneyCache is used', async () => { 53 | await checkCache(req, res, next); 54 | expect(cacheMoneyCache[cacheKey]).toBeDefined(); 55 | expect(valueFromCache).toEqual(cacheMoneyCache[cacheKey]); 56 | }); 57 | }); 58 | 59 | 60 | describe('Testing if cache contains the requested data', () => { 61 | it('should update the recency of the accessed cacheKey and return data from the cache', () => { 62 | const valueFromCache = '{"data": "dummy"}'; 63 | const cacheKey = 'cacheKey'; 64 | const queue = jest.fn(); 65 | const res = { 66 | send: jest.fn(), 67 | }; 68 | 69 | queue.updateRecencyOfExistingCache = jest.fn(); 70 | const listArray = jest.fn(); 71 | const parsedValue = JSON.parse(valueFromCache); 72 | queue.add = jest.fn(); 73 | queue.length = 10; 74 | queue.removeSmallestLatencyFromGroup = jest.fn(() => ({ 75 | latency: 0, 76 | num: 0, 77 | })); 78 | const redisClient = jest.fn(); 79 | redisClient.set = jest.fn(); 80 | const cacheMoneyCache = {}; 81 | const capacity = 10; 82 | const groupSize = 3; 83 | const currGroupSize = 3; 84 | 85 | if (valueFromCache) { 86 | // update recency of the accessed cacheKey by moving it to the front of the linked list. 87 | queue.updateRecencyOfExistingCache(cacheKey); 88 | // if cache contains the requested data return data from the cache. 
89 | const listArray = traverse(queue); 90 | 91 | res.send({ 92 | data: parsedValue, 93 | queue: listArray, 94 | currGroupSize, 95 | cached: true, 96 | }); 97 | 98 | expect(res.send).toHaveBeenCalledWith({ 99 | data: parsedValue, 100 | queue: listArray, 101 | currGroupSize, 102 | cached: true, 103 | }); 104 | expect(queue.updateRecencyOfExistingCache).toHaveBeenCalledWith(cacheKey); 105 | } 106 | }); 107 | }); 108 | //accounts for if the minLatencyNodeInGroup is the tail of the list 109 | if (this.tail === minLatencyNodeInGroup) { 110 | this.tail = this.tail.prev; 111 | this.tail.next = null; 112 | this.length--; 113 | delete this.cache[minLatencyNodeInGroup.key]; 114 | return minLatencyNodeInGroup; 115 | } 116 | 117 | //handles the case where minLatencyNodeInGroup is in middle of the list 118 | minLatencyNodeInGroup.prev.next = minLatencyNodeInGroup.next; 119 | minLatencyNodeInGroup.next.prev = minLatencyNodeInGroup.prev; 120 | this.length--; 121 | delete this.cache[minLatencyNodeInGroup.key]; 122 | return minLatencyNodeInGroup; 123 | 124 | 125 | 126 | describe("add()", () => { 127 | let evictionQueue; 128 | beforeEach(() => { 129 | evictionQueue = new EvictionQueue(); 130 | }); 131 | 132 | it("Should add a new node to the head of the list", () => { 133 | const cacheKey = "test"; 134 | const latency = 10; 135 | evictionQueue.add(cacheKey, latency); 136 | expect(evictionQueue.head.latency).toBe(10); 137 | expect(evictionQueue.head.key).toBe("test"); 138 | }); 139 | }); 140 | 141 | describe("removeSmallestLatencyFromGroup()", () => { 142 | let evictionQueue; 143 | beforeEach(() => { 144 | evictionQueue = new EvictionQueue(); 145 | }); 146 | 147 | it("Should return the node with the smallest latency from the group", () => { 148 | evictionQueue.add("test1", 5); 149 | evictionQueue.add("test2", 10); 150 | evictionQueue.add("test3", 8); 151 | const minLatencyNodeInGroup = evictionQueue.removeSmallestLatencyFromGroup(3); 152 | expect(minLatencyNodeInGroup.key).toBe("test1"); 153 | expect(minLatencyNodeInGroup.latency).toBe(5); 154 | expect(evictionQueue.length).toBe(2); 155 | }); 156 | }); 157 | describe("updateRecencyOfExistingCache", () => { 158 | it("should update the recency of an existing cache", () => { 159 | const evictionQueue = new EvictionQueue(); 160 | evictionQueue.head = { 161 | key: "head", 162 | prev: null, 163 | next: { 164 | key: "middle", 165 | prev: { 166 | key: "tail", 167 | prev: null, 168 | next: null 169 | }, 170 | next: null 171 | } 172 | }; 173 | evictionQueue.tail = evictionQueue.head.next.prev; 174 | evictionQueue.cache = { 175 | head: evictionQueue.head, 176 | middle: evictionQueue.head.next, 177 | tail: evictionQueue.tail 178 | }; 179 | evictionQueue.length = 3; 180 | evictionQueue.updateRecencyOfExistingCache("middle"); 181 | expect(evictionQueue.head.key).toBe("middle"); 182 | expect(evictionQueue.head.prev).toBe(null); 183 | expect(evictionQueue.head.next).toEqual({ 184 | key: "head", 185 | prev: { key: "middle", prev: null, next: null }, 186 | next: { 187 | key: "tail", 188 | prev: { key: "head", prev: null, next: null }, 189 | next: null 190 | } 191 | }); 192 | expect(evictionQueue.tail.key).toBe("tail"); 193 | expect(evictionQueue.tail.next).toBe(null); 194 | expect(evictionQueue.tail.prev).toEqual({ 195 | key: "head", 196 | prev: { key: "middle", prev: null, next: null }, 197 | next: null 198 | }); 199 | expect(evictionQueue.length).toBe(3); 200 | }); 201 | }); 202 | -------------------------------------------------------------------------------- 
/__tests__/supertest.js: -------------------------------------------------------------------------------- 1 | const request = require('supertest'); 2 | const app = require('../demo/server/server'); 3 | const demoFunc = require('../demo/server/DemoFunc'); 4 | const CacheMoney = require('../demo/server/cacheMoney'); 5 | 6 | describe('GraphQL endpoint returns an object', () => { 7 | describe('/graphql', () => { 8 | it('is a json object with valid query', async () => { 9 | const response = await request(app) 10 | .post('/graphql') 11 | .send({ query: `{ clients { id name email phone } }` }) 12 | .expect('Content-Type', /application\/json/) 13 | .expect(200) 14 | .then((res) => { 15 | expect(typeof res).toBe('object'); 16 | expect(JSON.stringify(res)).toBe(JSON.stringify(res)); 17 | }); 18 | }, 15000); 19 | 20 | it('returns 400 status on invalid query', async () => { 21 | const response = await request(app) 22 | .post('/graphql') 23 | .send({ query: `` }) 24 | .expect(400); 25 | }); 26 | }); 27 | }); 28 | 29 | test('Demo func middleware check cache function', () => { 30 | expect(typeof demoFunc('http://localhost:3000/graphql', 4, 2)).toBe( 31 | 'function' 32 | ); 33 | }); 34 | 35 | test('Cachier caching returns check cache function', () => { 36 | expect(typeof CacheMoney('http://localhost:3000/graphql', 4, 2)).toBe( 37 | 'function' 38 | ); 39 | }); 40 | -------------------------------------------------------------------------------- /demo/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/Cachier/2e267d1765a9bf5f3d62e7a3c258ea0d3159ff9a/demo/.DS_Store -------------------------------------------------------------------------------- /demo/client/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/Cachier/2e267d1765a9bf5f3d62e7a3c258ea0d3159ff9a/demo/client/.DS_Store -------------------------------------------------------------------------------- /demo/client/App.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { BrowserRouter, Routes, Route } from 'react-router-dom'; 3 | import Demo from './pages/Demo'; 4 | import Landing from './pages/Landing.js'; 5 | import Docs from './pages/Docs'; 6 | import About from './pages/About'; 7 | import Footer from './components/Footer' 8 | import NavBar from './components/NavBar'; 9 | 10 | const App = () => { 11 | 12 | return ( 13 | 14 | 15 | 16 | }> 17 | }> 18 | }> 19 | }> 20 | 21 |