├── .DS_Store ├── .gitignore ├── LICENSE.md ├── README.md ├── assets ├── bundl-inline.svg └── bundl-logo-color.svg ├── bunDL-client ├── .gitignore ├── README.md ├── __test__ │ ├── bunCache.test.js │ ├── cacheKey.test.js │ └── extractAST.test.js ├── package.json └── src │ ├── bunCache.js │ └── helpers │ ├── cacheKeys.js │ ├── extractAST.js │ ├── pouchHelpers.js │ └── queryHelpers.js └── bunDL-server ├── package.json └── src ├── __test__ └── functest.js ├── bundl.js └── helpers ├── caching-logic.js ├── intercept-and-parse-logic.js ├── pouchdbHelpers.js ├── prototype-logic.js ├── queryObjectFunctions.js ├── redisConnection.js └── redisHelper.js /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/BunDL/4178f67f4c6965fda28e3af7ff97c46ab2411ef8/.DS_Store -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore 2 | 3 | # Logs 4 | 5 | logs 6 | _.log 7 | npm-debug.log_ 8 | yarn-debug.log* 9 | yarn-error.log* 10 | lerna-debug.log* 11 | .pnpm-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | 15 | report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json 16 | 17 | # Runtime data 18 | 19 | pids 20 | _.pid 21 | _.seed 22 | \*.pid.lock 23 | 24 | # Directory for instrumented libs generated by jscoverage/JSCover 25 | 26 | lib-cov 27 | 28 | # Coverage directory used by tools like istanbul 29 | 30 | coverage 31 | \*.lcov 32 | 33 | # nyc test coverage 34 | 35 | .nyc_output 36 | 37 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 38 | 39 | .grunt 40 | 41 | # Bower dependency directory (https://bower.io/) 42 | 43 | bower_components 44 | 45 | # node-waf configuration 46 | 47 | .lock-wscript 48 | 49 | # Compiled binary addons 
(https://nodejs.org/api/addons.html) 50 | 51 | build/Release 52 | 53 | # Dependency directories 54 | 55 | node_modules/ 56 | jspm_packages/ 57 | 58 | # Snowpack dependency directory (https://snowpack.dev/) 59 | 60 | web_modules/ 61 | 62 | # TypeScript cache 63 | 64 | \*.tsbuildinfo 65 | 66 | # Optional npm cache directory 67 | 68 | .npm 69 | 70 | # Optional eslint cache 71 | 72 | .eslintcache 73 | 74 | # Optional stylelint cache 75 | 76 | .stylelintcache 77 | 78 | # Microbundle cache 79 | 80 | .rpt2_cache/ 81 | .rts2_cache_cjs/ 82 | .rts2_cache_es/ 83 | .rts2_cache_umd/ 84 | 85 | # Optional REPL history 86 | 87 | .node_repl_history 88 | 89 | # Output of 'npm pack' 90 | 91 | \*.tgz 92 | 93 | # Yarn Integrity file 94 | 95 | .yarn-integrity 96 | 97 | # dotenv environment variable files 98 | 99 | .env 100 | .env.development.local 101 | .env.test.local 102 | .env.production.local 103 | .env.local 104 | 105 | # parcel-bundler cache (https://parceljs.org/) 106 | 107 | .cache 108 | .parcel-cache 109 | 110 | # Next.js build output 111 | 112 | .next 113 | out 114 | 115 | # Nuxt.js build / generate output 116 | 117 | .nuxt 118 | dist 119 | 120 | # Gatsby files 121 | 122 | .cache/ 123 | 124 | # Comment in the public line in if your project uses Gatsby and not Next.js 125 | 126 | # https://nextjs.org/blog/next-9-1#public-directory-support 127 | 128 | # public 129 | 130 | # vuepress build output 131 | 132 | .vuepress/dist 133 | 134 | # vuepress v2.x temp and cache directory 135 | 136 | .temp 137 | .cache 138 | 139 | # Docusaurus cache and generated files 140 | 141 | .docusaurus 142 | 143 | # Serverless directories 144 | 145 | .serverless/ 146 | 147 | # FuseBox cache 148 | 149 | .fusebox/ 150 | 151 | # DynamoDB Local files 152 | 153 | .dynamodb/ 154 | 155 | # TernJS port file 156 | 157 | .tern-port 158 | 159 | # Stores VSCode versions used for testing VSCode extensions 160 | 161 | .vscode-test 162 | 163 | # yarn v2 164 | 165 | .yarn/cache 166 | .yarn/unplugged 167 | 
.yarn/build-state.yml 168 | .yarn/install-state.gz 169 | .pnp.\* 170 | 171 | # IntelliJ based IDEs 172 | .idea 173 | 174 | # Finder (MacOS) folder config 175 | .DS_Store 176 | 177 | .env 178 | 179 | functions/ 180 | .prettierrc 181 | bun.lockb 182 | dump.rdb 183 | .DS_Store 184 | .vscode/launch.json 185 | bun.lockb 186 | dump.rdb 187 | 188 | 189 | 190 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) [2023] [bunDL] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 |
4 |
5 | 6 | ![AppVeyor](https://img.shields.io/badge/version-1.0.0-blue.svg?style=for-the-badge&labelColor=B5A886&color=5A2A27) [![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=for-the-badge&labelColor=B5A886&color=5A2A27)](https://github.com/open-source-labs/bunDL/issues) ![LicenseMIT](https://img.shields.io/badge/License-MIT-green?style=for-the-badge&labelColor=B5A886&color=5A2A27&link=https%3A%2F%2Fopensource.org%2Flicense%2Fmit%2F) 7 | 8 |
9 | 10 | 11 | 12 | 13 | 14 | # bunDL 15 | 16 |

bunDL is an intuitive, skinny GraphQL interceptor that checks for cached data, handles mutations with PouchDB, and only sends modified or non-cached queries to the server. The returned data is then cached for future requests.

17 | 18 |
19 | 20 | ![Bun](https://img.shields.io/badge/Bun-%23000000.svg?style=for-the-badge&logo=bun&logoColor=white) ![JavaScript](https://img.shields.io/badge/javascript-%23323330.svg?style=for-the-badge&logo=javascript&logoColor=%23F7DF1E) ![Redis](https://img.shields.io/badge/redis%20Stack-%23DD0031.svg?&style=for-the-badge&logo=redis&logoColor=white) ![GraphQL](https://img.shields.io/badge/-GraphQL-E10098?style=for-the-badge&logo=graphql&logoColor=white) ![MongoDB](https://img.shields.io/badge/Mongo%20DB-%234ea94b.svg?style=for-the-badge&logo=mongodb&logoColor=white) 21 | ![pouchDB](https://img.shields.io/badge/pouch%20db-green?style=for-the-badge&logo=data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4KPCEtLSBHZW5lcmF0b3I6IEFkb2JlIElsbHVzdHJhdG9yIDI4LjAuMCwgU1ZHIEV4cG9ydCBQbHVnLUluIC4gU1ZHIFZlcnNpb246IDYuMDAgQnVpbGQgMCkgIC0tPgo8c3ZnIHZlcnNpb249IjEuMSIgaWQ9IkxheWVyXzEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiIHg9IjBweCIgeT0iMHB4IgoJIHZpZXdCb3g9IjAgMCA1MTIgNTEyIiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCA1MTIgNTEyOyIgeG1sOnNwYWNlPSJwcmVzZXJ2ZSI+CjxzdHlsZSB0eXBlPSJ0ZXh0L2NzcyI+Cgkuc3Qwe2ZpbGw6IzZDQ0I5OTt9Cjwvc3R5bGU+CjxwYXRoIGNsYXNzPSJzdDAiIGQ9Ik05MC4xLDZjMC4yLDAsMC4zLDAsMC41LDBjMzYuNSw0OC4zLDczLDk2LjcsMTA5LjUsMTQ1LjFjMS40LDEuOCwyLjksMi41LDUuMSwyLjRjMzQuNy0wLjEsNjkuNS0wLjEsMTA0LjIsMAoJYzIuNiwwLDQuMi0wLjgsNS44LTIuOUMzNTAuNSwxMDMuMywzODYsNTYuMSw0MjEuNCw4LjljMC43LTAuOSwxLjItMS45LDEuOC0yLjljMC4yLDAsMC4zLDAsMC41LDBjMC40LDIsMC45LDMuOSwxLjIsNS45CgljNi45LDM2LjgsMTMuNyw3My42LDIwLjYsMTEwLjVjMS40LDcuMywyLjksMTQuNiw0LjMsMjEuOGMwLDAuMiwwLDAuMywwLDAuNWMtMC42LDAuNS0xLjIsMS4xLTEuOCwxLjYKCWMtMjQuNywyNC4yLTQ5LjQsNDguNC03NC4yLDcyLjVjLTIuOSwyLjgtMi45LDIuOC0wLjksNi4yYzE2LjYsMjkuNCwzMy4yLDU4LjgsNDkuOSw4OC4yYzEuMSwxLjksMS4zLDMuMywwLjIsNS4zCgljLTM1LjEsNjEuNS03MC4yLDEyMy4xLTEwNS4zLDE4NC42Yy0xLjEsMi0yLjQsMi44LTQuNywyLjhjLTM3LjItMC4xLTc0LjUtMC4xLTExMS43LDBjLTIuMywwLTMuNi0wLjgtNC44LTIuOAoJYy0zNC43LTYxLjMtNjku
NS0xMjIuNi0xMDQuMy0xODMuOGMtMS40LTIuNS0xLjUtNC4zLDAtNi44YzE2LjYtMjkuNSwzMy4xLTU5LDQ5LjgtODguNWMxLjEtMS45LDAuOS0zLTAuNy00LjUKCWMtMjUuOS0yNC40LTUxLjctNDguOC03Ny42LTczLjJjLTEuMy0xLjMtMS43LTIuNC0xLjMtNC4zQzY5LjgsMTA2LjIsNzcuMSw3MC4xLDg0LjUsMzRDODYuNCwyNC43LDg4LjIsMTUuMyw5MC4xLDZ6Ii8+Cjwvc3ZnPgo=&logoColor=6ccb99&color=555555) 22 | 23 | ![CouchDB](https://img.shields.io/badge/Couch%20DB-DB?style=for-the-badge&logo=apachecouchdb&logoColor=%23E42528&color=black) ![IBM Cloudant](https://img.shields.io/badge/IBM%20Cloud-1261FE?style=for-the-badge&logo=IBM%20Cloud&logoColor=white) ![React](https://img.shields.io/badge/React-20232A?style=for-the-badge&logo=react&logoColor=61DAFB) ![Prettier](https://img.shields.io/badge/prettier-1A2C34?style=for-the-badge&logo=prettier&logoColor=F7BA3E) ![ESLint](https://img.shields.io/badge/eslint-3A33D1?style=for-the-badge&logo=eslint&logoColor=white) 24 | 25 | 26 | 27 |
28 | 29 | 30 | 31 |

Installation

32 | 33 | ### Install bunDL-server 34 | 35 | ```bash 36 | bun install bundl-server 37 | ``` 38 | 39 | ### Install bunDL-cache 40 | 41 | ```bash 42 | bun install bundl-cache 43 | ``` 44 | 45 | 46 | 47 |

Features:

48 | 49 | ## 🗂️ Caching with Redis Stack (Server) & LRU Cache (Client) 50 | 51 | retrieves data from the local cache with lightning speed 52 | 53 | 🥟 In-Memory Storage: Fast access to frequently used data. 54 | 55 | 🥟 Disk-Based Storage: Suitable for larger datasets that don't fit into memory. 56 | 57 | 🥟 Time-to-Live (TTL): Automatic eviction of old data based on time or size constraints. 58 | 59 |
60 | 61 | ## 🗄️ Database Integration 62 | 63 | seamlessly integrates with both relational and document based databases. 64 | 65 | 🥟 SQL Support: Easily connect to MySQL, PostgreSQL, and SQLite databases. 66 | 67 | 🥟 NoSQL Support: Options for integrating with MongoDB, Redis, and other NoSQL databases. 68 | 69 | 🥟 Syncing with PouchDB and CouchDB to provide offline access to data 70 | 71 |
72 | 73 | ## 🔎 Query Optimization 74 | 75 | ensures 76 | 77 | 🥟 Lazy Loading: Fetch only the data that is needed, reducing initial load times. 78 | 79 | 🥟 Batch Processing: Perform bulk operations for improved efficiency. 80 | 81 | 🥟 Indexing: Speed up data retrieval operations with intelligent indexing. 82 | 83 |
84 | 85 | ## 🎁 Plus More! 86 | 87 | 🥟 RESTful API: Easy integration with other services. 88 | 89 | 🥟 Data Validation: Robust validation mechanisms to ensure data integrity. 90 | 91 | 🥟 Real-Time Analytics: Keep track of various metrics in real-time. 92 | 93 |
94 | 95 | --- 96 | 97 | 98 | 99 | ## Server Side Implementation: 100 | 101 | makes use of [Redis Stack](https://redis.io/docs/install/install-stack/) for caching data as JSON objects; [Redis Stack](https://redis.io/docs/install/install-stack/) needs to be installed independently. Installation instructions can be found on the Redis website [here](https://redis.io/docs/install/install-stack/). 102 | 103 | ### 1️⃣ Install Bun runtime 104 | 105 | ```bash 106 | npm install -g bun 107 | ``` 108 | 109 | ### 2️⃣ Install : 110 | 111 | ```bash 112 | bun install bundl-server 113 | ``` 114 | 115 | ### 3️⃣ Define your schema shape 116 | 117 | #### For Unix / Linux / macOS: 118 | 119 | Open a terminal command line in the root directory folder that houses your server file. 120 | 121 | Run the following commands: 122 | 123 | ```bash 124 | touch .env # will create a new `.env` file if one doesn't exist 125 | echo "QUERY=\"[enter your query here]\"" >> .env 126 | ``` 127 | 128 | For example, if your GraphQL schema looks like this: 129 | 130 | ```graphql 131 | query samplePokeAPIquery { 132 | pokemon_v2_pokemon_by_pk(id: ) { 133 | name 134 | id 135 | height 136 | base_experience 137 | weight 138 | pokemon_v2_pokemonsprites { 139 | id 140 | pokemon_id 141 | sprites 142 | } 143 | } 144 | } 145 | 146 | ``` 147 | 148 | Your `QUERY="[...]"` input will look like this: 149 | 150 | ```bash 151 | touch .env // This will create a new `.env` file if one doesn't exist 152 | echo "QUERY=\"{ pokemon_v2_pokemon_by_pk(id: ) { name id height base_experience weight pokemon_v2_pokemonsprites { id pokemon_id sprites } } }\"" >> .env 153 | ``` 154 | 155 | 156 | 157 | ## Client Side Documentation: 158 | 159 | works best with [GraphQL](https://graphql.org/), [LRU Cache](https://github.com/isaacs/node-lru-cache#readme), [PouchDB](https://github.com/pouchdb/pouchdb), and CouchDB style database. 
160 | 161 | For information on the client side implementation, please visit the [bunDL Client README](https://github.com/oslabs-beta/BunDL/tree/main/bunDL-client) 162 | 163 | 164 | 165 | ## Contributing 166 | 167 | We believe in the power of open source. By contributing to bunDL, you're not just making an impact on this project but also supporting the wider open source community. Our mission with bunDL is to create an accessible tool, and every contribution, big or small, pushes this vision forward. 168 | 169 | This project, bunDL, is an open source endeavor. If you're looking to understand our project's journey and how to contribute, visit our [Demo Repository](https://github.com/bunDL-demo/bunDL-demo) 170 | 171 | --- 172 | 173 | 174 | 175 | ## BunDL Contributors 176 | 177 | Accelerated by [OS Labs](https://github.com/open-source-labs) and developed by [Ken Iwane](https://www.linkedin.com/in/ken-iwane-5b9209157/), [Shi Kuang](https://www.linkedin.com/in/shi-kuang/), [Brandon Do](https://www.linkedin.com/in/brandonndo/), [Gio Mogi](https://www.linkedin.com/in/giovanni-mogi-189013193/), & [Andrew Wicker](https://www.linkedin.com/in/andrewwicker/). 
178 | -------------------------------------------------------------------------------- /assets/bundl-inline.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 8 | 9 | 10 | 12 | 14 | 16 | 17 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /assets/bundl-logo-color.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /bunDL-client/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | -------------------------------------------------------------------------------- /bunDL-client/README.md: -------------------------------------------------------------------------------- 1 | # bundl-cache 2 | 3 | bundl-cache is a client-side GraphQL caching solution, optimized for the Bun runtime. Our product is designed to intercept GraphQL queries, parse through relevant information from the AST, and generate unique cache key/value pairs to accomodate for an array of query types all within the browser. BunDL is most optimal when utilizing pouchDB and couchDB. 
The offline synchronization between the two databases allowed us to further reduce requests to the server and provide a seamless experience for the user in terms of performance speeds. 4 | 5 | ## Installation 6 | 7 | Within your terminal, download bundl-cache with 'bun install bundl-cache' 8 | 9 | ## Implementation 10 | 11 | 1. Import BunDL from 'bundl-cache' 12 | 2. Create a new instance of 'bundl-cache' 13 | 3. Set configurations based on your caching needs 14 | 4. Replace any fetch requests with 'bunDL.query' 15 | 16 | Example: 17 | 18 | If a user queries the following code below... 19 | 20 | ```js 21 | const query = `query { 22 | company (id: 123) { 23 | name 24 | city 25 | state 26 | department { 27 | name 28 | } 29 | } 30 | }`; 31 | ``` 32 | 33 | A typical fetch request may look like this: 34 | 35 | ```js 36 | fetch('/graphql', { 37 | method: 'POST', 38 | body: JSON.stringify({ query }), 39 | headers: { 'Content-Type': 'application/json' }, 40 | }); 41 | ``` 42 | 43 | With bunDL your fetch request would now look like the following: 44 | 45 | ```js 46 | const BunDL = new BunDL(); 47 | 48 | BunDL.query('/graphQL', query).then(/* use parsed response */); 49 | ``` 50 | 51 | Before creating a new instance of BunDL, you may also pass in specific configurations. However, this feature is currently in beta and may exhibit unintended bugs. We encourage users to stick with default configurations for the time being. 52 | 53 | Currently, the default configurations are: 54 | 55 | ```js 56 | const defaultConfig = { 57 | cacheMetadata: false, 58 | cacheVariables: true, 59 | requireArguments: true, 60 | }; 61 | ``` 62 | 63 | Setting 'cacheMetadata' to true will reconfigure bunDL to store additional information about the query (This may decrease performance speeds) 64 | Setting 'cacheVariables' to false will reconfigure bunDL to cache queries without variables 65 | Setting 'requireArguments' to false will reconfigure bunDL to cache queries without arguments. 
66 | 67 | To set your own configurations, initialize an object with any or all of the default configurations and change the boolean to your desired option. 68 | 69 | ## Usage Notes 70 | 71 | - bunDL is around 94% faster than fetching the same data without caching over a network request. However, our current limitations lies within the granularity of our caching solutions. The bunDL developers are always looking to improve our product, and any support/contributions from the open source community is always welcomed. 72 | 73 | - bunDL can only cache 1-2-depth queries with arguments/variables 74 | - Any mutated queries will invalidate the entire cache 75 | - Deeply nested (3-depth+) queries will not be cached 76 | - Partial queries works at the single depth level and inconsistently works on the 2-depth level 77 | -------------------------------------------------------------------------------- /bunDL-client/__test__/bunCache.test.js: -------------------------------------------------------------------------------- 1 | import { expect, test, describe, beforeAll, beforeEach } from 'bun:test'; 2 | import { graphql } from 'graphql'; 3 | import BunCache from './bunCache.js'; 4 | import { 5 | generateGraphQLQuery, 6 | generateMissingLRUCachekeys, 7 | mergeGraphQLresponses, 8 | updateMissingCache, 9 | generateMissingPouchDBCachekeys, 10 | updatePouchDB, 11 | } from './helpers/queryHelpers'; 12 | const PouchDB = require('pouchdb'); 13 | 14 | 15 | describe('1 depth test', () => { 16 | test.skip('missing cachekeys correctly generated', async () => { 17 | const newBun = new BunCache(); 18 | const address = { 19 | id: '234', 20 | street: '123 codesmith st', 21 | zip: '92302', 22 | city: 'LA', 23 | state: 'CA', 24 | country: 'usa', 25 | }; 26 | newBun.cache.set('query:user:123:firstName', 'bun'); 27 | newBun.cache.set('query:user:123:address', address); 28 | 29 | let graphQLresponse = { 30 | data: {}, 31 | }; 32 | // from proto, what we want to request: graphql request 33 | const 
cacheKeys1nest = [ 34 | 'query:user:123:firstName', 35 | 'query:user:123:lastName', 36 | 'query:user:123:email', 37 | 'query:user:123:phoneNumber', 38 | 'query:user:123:address', 39 | ]; 40 | 41 | const missingCacheKeys1nest = [ 42 | 'query:user:123:lastName', 43 | 'query:user:123:email', 44 | 'query:user:123:phoneNumber', 45 | ]; 46 | 47 | const results = generateMissingLRUCachekeys(cacheKeys1nest, newBun.cache); 48 | 49 | expect(results.missingCacheKeys).toEqual(missingCacheKeys1nest); 50 | }); 51 | 52 | test.skip('graphql response generated from current cache', async () => { 53 | const newBun = new BunCache(); 54 | const address = { 55 | id: '234', 56 | street: '123 codesmith st', 57 | zip: '92302', 58 | city: 'LA', 59 | state: 'CA', 60 | country: 'usa', 61 | }; 62 | newBun.cache.set('query:user:123:firstName', 'bun'); 63 | newBun.cache.set('query:user:123:address', address); 64 | 65 | const graphqlResponse2 = { 66 | data: { 67 | user: { 68 | id: '123', 69 | firstName: 'bun', 70 | address: { 71 | id: '234', 72 | city: 'LA', 73 | country: 'usa', 74 | id: '234', 75 | state: 'CA', 76 | street: '123 codesmith st', 77 | zip: '92302', 78 | }, 79 | }, 80 | }, 81 | }; 82 | 83 | const cacheKeys1nest = [ 84 | 'query:user:123:firstName', 85 | 'query:user:123:lastName', 86 | 'query:user:123:email', 87 | 'query:user:123:phoneNumber', 88 | 'query:user:123:address', 89 | ]; 90 | 91 | const results = generateMissingLRUCachekeys(cacheKeys1nest, newBun.cache); 92 | 93 | expect(results.graphQLcachedata).toEqual(graphqlResponse2); 94 | }); 95 | 96 | test.skip('missing POUCHDB cachekeys correctly generated', async () => { 97 | 98 | const localDB = new PouchDB('bundl-database'); 99 | 100 | const doc = { 101 | _id: '123', 102 | firstName: 'bun', 103 | lastName: 'dl', 104 | address: { 105 | _id: '234', 106 | city: 'LA', 107 | country: 'usa', 108 | id: '234', 109 | state: 'CA', 110 | street: '123 codesmith st', 111 | zip: '92302', 112 | }, 113 | _rev: 
"3-22e04404f44864db1cd84369d732f4b5" 114 | }; 115 | 116 | //await localDB.put(doc) 117 | 118 | // from proto, what we want to request: graphql request 119 | 120 | const graphqlcachedata = { 121 | data: { 122 | user: { 123 | _id: '123', 124 | firstName: 'bun', 125 | }, 126 | }, 127 | }; 128 | 129 | 130 | const missingCacheKeys = [ 131 | 'query:user:123:lastName', 132 | 'query:user:123:email', 133 | 'query:user:123:phoneNumber', 134 | 'query:user:123:address', 135 | ]; 136 | 137 | const missingCacheKeys1nest = [ 138 | 'query:user:123:email', 139 | 'query:user:123:phoneNumber', 140 | ]; 141 | 142 | const results = await generateMissingPouchDBCachekeys( 143 | missingCacheKeys, 144 | graphqlcachedata, 145 | localDB 146 | ); 147 | console.log('results here', results) 148 | 149 | expect(results.missingPouchCacheKeys).toEqual(missingCacheKeys1nest); 150 | }); 151 | 152 | test.skip('missing POUCHDB graphql response correctly generated', async () => { 153 | 154 | const localDB = new PouchDB('bundl-database'); 155 | 156 | const doc = { 157 | _id: '123', 158 | firstName: 'bun', 159 | lastName: 'dl', 160 | address: { 161 | _id: '234', 162 | city: 'LA', 163 | country: 'usa', 164 | id: '234', 165 | state: 'CA', 166 | street: '123 codesmith st', 167 | zip: '92302', 168 | }, 169 | }; 170 | 171 | //await localDB.put(doc) 172 | 173 | // from proto, what we want to request: graphql request 174 | 175 | const graphQLcachedataresults = { 176 | data: { 177 | user: { 178 | _id: '123', 179 | firstName: 'bun', 180 | lastName: 'dl', 181 | address: { 182 | _id: '234', 183 | city: 'LA', 184 | country: 'usa', 185 | id: '234', 186 | state: 'CA', 187 | street: '123 codesmith st', 188 | zip: '92302', 189 | }, 190 | }, 191 | }, 192 | }; 193 | 194 | const graphqlcachedata = { 195 | data: { 196 | user: { 197 | _id: '123', 198 | firstName: 'bun', 199 | }, 200 | }, 201 | }; 202 | 203 | 204 | const missingCacheKeys = [ 205 | 'query:user:123:lastName', 206 | 'query:user:123:email', 207 | 
'query:user:123:phoneNumber', 208 | 'query:user:123:address', 209 | ]; 210 | 211 | const missingCacheKeys1nest = [ 212 | 'query:user:123:email', 213 | 'query:user:123:phoneNumber', 214 | ]; 215 | 216 | const results = await generateMissingPouchDBCachekeys( 217 | missingCacheKeys, 218 | graphqlcachedata, 219 | localDB 220 | ); 221 | console.log('results here', results) 222 | 223 | expect(results.graphQLcachedata).toEqual(graphQLcachedataresults); 224 | }); 225 | 226 | test.skip('graphql query from missing cachekeys 1 DEPTH', async () => { 227 | const newBun = new BunCache(); 228 | const address = { 229 | id: '234', 230 | street: '123 codesmith st', 231 | zip: '92302', 232 | city: 'LA', 233 | state: 'CA', 234 | country: 'usa', 235 | }; 236 | newBun.cache.set('query:user:123:firstName', 'bun'); 237 | newBun.cache.set('query:user:123:address', address); 238 | 239 | const graphqlquery = `query { 240 | user (id:123) { 241 | id 242 | lastName 243 | email 244 | phoneNumber 245 | } 246 | }`; 247 | 248 | const missingCacheKeys1nest = [ 249 | 'query:user:123:lastName', 250 | 'query:user:123:email', 251 | 'query:user:123:phoneNumber', 252 | ]; 253 | 254 | const query = generateGraphQLQuery(missingCacheKeys1nest); 255 | 256 | expect(query.replace(/\s/g, '')).toEqual(graphqlquery.replace(/\s/g, '')); 257 | }); 258 | 259 | test('update pouchdb', async () => { 260 | const localDB = new PouchDB('bundl-database'); 261 | const doc = { 262 | _id: 'query:user:123', 263 | firstName: 'bun', 264 | lastName: 'dl', 265 | email: 'bundle@gmail.com', 266 | phoneNumber: '999-999-999', 267 | address: { 268 | _id: '234', 269 | city: 'LA', 270 | country: 'usa', 271 | id: '234', 272 | state: 'CA', 273 | street: '123 codesmith st', 274 | zip: '92302', 275 | }, 276 | }; 277 | 278 | const queryResults = { 279 | data: { 280 | user: { 281 | id: '123', 282 | email: 'bundle@gmail.com', 283 | phoneNumber: '999-999-999', 284 | }, 285 | }, 286 | }; 287 | 288 | const updatedCacheKeys = { 289 | 
'query:user:123:email': 'bundle@gmail.com', 290 | 'query:user:123:phoneNumber': '999-999-999', 291 | }; 292 | 293 | const results = await updatePouchDB(updatedCacheKeys, localDB); 294 | expect(results).toEqual(doc); 295 | }); 296 | }); 297 | 298 | test.skip('update missing cache values', async () => { 299 | const queryResults = { 300 | data: { 301 | user: { 302 | id: '123', 303 | lastName: 'dl', 304 | email: 'bundle@gmail.com', 305 | phoneNumber: '999-999-999', 306 | address: { 307 | id: '234', 308 | street: '123 codesmith st', 309 | zip: '92302', 310 | }, 311 | }, 312 | }, 313 | }; 314 | 315 | const missingCacheKeys2nest = [ 316 | 'query:user:123:lastName', 317 | 'query:user:123:email', 318 | 'query:user:123:phoneNumber', 319 | 'query:user:123:address', 320 | ]; 321 | 322 | const updatedCacheKeys = { 323 | 'query:user:123:lastName': 'dl', 324 | 'query:user:123:email': 'bundle@gmail.com', 325 | 'query:user:123:phoneNumber': '999-999-999', 326 | 'query:user:123:address': { 327 | id: '234', 328 | street: '123 codesmith st', 329 | zip: '92302', 330 | }, 331 | }; 332 | 333 | const results = updateMissingCache(queryResults, missingCacheKeys2nest); 334 | expect(results).toEqual(updatedCacheKeys); 335 | }); 336 | 337 | test.skip('mergeGraphQLresponses', async () => { 338 | const newBun = new BunCache(); 339 | 340 | const graphqlResponse2 = { 341 | data: { 342 | user: { 343 | id: '123', 344 | email: 'bundle@gmail.com', 345 | phoneNumber: '999-999-999', 346 | }, 347 | }, 348 | }; 349 | 350 | const graphqlResponse1 = { 351 | data: { 352 | user: { 353 | id: '123', 354 | firstName: 'bun', 355 | lastName: 'dl', 356 | address: { 357 | id: '234', 358 | city: 'LA', 359 | state: 'CA', 360 | country: 'usa', 361 | }, 362 | }, 363 | }, 364 | }; 365 | 366 | const ExpectedMergedGraphqlResponse = { 367 | data: { 368 | user: { 369 | id: '123', 370 | firstName: 'bun', 371 | lastName: 'dl', 372 | email: 'bundle@gmail.com', 373 | phoneNumber: '999-999-999', 374 | address: { 375 | id: '234', 
376 | city: 'LA', 377 | state: 'CA', 378 | country: 'usa', 379 | }, 380 | }, 381 | }, 382 | }; 383 | 384 | const mergedgraphQLresponse = mergeGraphQLresponses( 385 | graphqlResponse1, 386 | graphqlResponse2 387 | ); 388 | expect(mergedgraphQLresponse).toEqual(ExpectedMergedGraphqlResponse); 389 | }); 390 | -------------------------------------------------------------------------------- /bunDL-client/__test__/cacheKey.test.js: -------------------------------------------------------------------------------- 1 | import { generateCacheKeys } from './bunDL-client/src/helpers/cacheKeys.js'; 2 | import { expect, test, describe } from 'bun:test'; 3 | 4 | describe('generateCacheKeys function', () => { 5 | test('should generate the cache keys from given proto', () => { 6 | const proto = { 7 | fields: { 8 | user: { 9 | $id: '123', 10 | id: true, 11 | firstName: true, 12 | lastName: true, 13 | email: true, 14 | phoneNumber: true, 15 | address: { 16 | $id: '456', 17 | street: true, 18 | city: true, 19 | state: true, 20 | zip: true, 21 | country: true, 22 | test: { 23 | $id: 789, 24 | id: true, 25 | test1: true, 26 | }, 27 | }, 28 | }, 29 | }, 30 | fragsDefinitions: {}, 31 | primaryQueryType: 'user', 32 | fragmentType: '', 33 | variableValues: { 34 | user: { 35 | id: '123', 36 | }, 37 | address: { 38 | id: '456', 39 | }, 40 | test: { 41 | id: '789', 42 | }, 43 | }, 44 | operation: 'query', 45 | }; 46 | 47 | const expectedKeys = [ 48 | 'query:user:$123:id', 49 | 'query:user:$123:firstName', 50 | 'query:user:$123:lastName', 51 | 'query:user:$123:email', 52 | 'query:user:$123:phoneNumber', 53 | 'query:address:$456:street', 54 | 'query:address:$456:city', 55 | 'query:address:$456:state', 56 | 'query:address:$456:zip', 57 | 'query:address:$456:country', 58 | 'query:test:$789:id', 59 | 'query:test:$789:test1', 60 | ]; 61 | 62 | const resultKeys = generateCacheKeys(proto); 63 | console.log(resultKeys); 64 | expect(resultKeys).toEqual(expectedKeys); 65 | }); 66 | }); 67 | 
-------------------------------------------------------------------------------- /bunDL-client/__test__/extractAST.test.js: -------------------------------------------------------------------------------- 1 | import { parse } from 'graphql'; 2 | import extractAST from './bunDL-client/src/helpers/extractAST.js'; 3 | // import { parseAST } from './bunDL-client/src/helpers/parseAST.js'; 4 | import { expect, test, describe } from 'bun:test'; 5 | 6 | describe('extractAST function', () => { 7 | test('should correctly extract operationType from a simple query', () => { 8 | const sampleAST = parse(` 9 | query { 10 | artist { 11 | id 12 | name 13 | albums { 14 | id 15 | name 16 | } 17 | } 18 | } 19 | `); 20 | 21 | const { proto, operationType } = extractAST(sampleAST, { 22 | cacheMetadata: false, 23 | cacheVariables: true, 24 | requireArguments: true, 25 | }); 26 | 27 | // console.log(JSON.stringify(proto, null, 2)); 28 | expect(operationType).toBe('noArguments'); 29 | }); 30 | 31 | // need to add testing directives, variables, fragment spreads, inline fragments, etc. 
32 | 33 | test('should handle fields with aliases if cacheMetadata is false', () => { 34 | const sampleAST = parse(` 35 | { 36 | user (id: 123) { 37 | id 38 | firstName: name 39 | address { 40 | id 41 | street 42 | city 43 | } 44 | } 45 | } 46 | `); 47 | 48 | const result = extractAST(sampleAST, { 49 | cacheMetadata: true, 50 | cacheVariables: true, 51 | }); 52 | 53 | // console.log(JSON.stringify(result, null, 2)); 54 | expect(result.proto.fields.user.firstName.subdata.name).toBe('name'); 55 | expect(result.proto.fields.user.firstName.subdata.alias).toBe('firstName'); 56 | }); 57 | }); 58 | 59 | test('should handle fields without metadata', () => { 60 | const sampleAST = parse( 61 | `{ 62 | user (id: "6521aebe1882b34d9bc89017"){ 63 | id 64 | firstName: name 65 | address { 66 | id 67 | street 68 | city 69 | } 70 | } 71 | }` 72 | ); 73 | 74 | const result = extractAST(sampleAST, { 75 | cacheMetadata: false, 76 | cacheVariables: true, 77 | requireIdArg: true, 78 | }); 79 | 80 | // console.log(JSON.stringify(result, null, 2)); 81 | expect(result.proto.fields.user.firstName).toBe(true); 82 | expect(result.proto.fields.user.address.street).toBe(true); 83 | }); 84 | 85 | test('should handle arguments', () => { 86 | const sampleAST = parse( 87 | `{ 88 | user (id: "6521aebe1882b34d9bc89017") { 89 | id 90 | firstName 91 | lastName 92 | email 93 | phoneNumber 94 | address (id: "123") { 95 | street 96 | city 97 | state 98 | zip 99 | country 100 | } 101 | } 102 | }` 103 | ); 104 | 105 | const result = extractAST(sampleAST, { 106 | cacheVariables: true, 107 | requireArguments: true, 108 | }); 109 | 110 | console.log(JSON.stringify(result, null, 2)); 111 | expect(result.proto.fields.user.$id).toBe('6521aebe1882b34d9bc89017'); 112 | }); 113 | 114 | test('should handle dynamic variables', () => { 115 | const sampleAST = parse( 116 | `query ($userId: String) { 117 | user (id: $userId) { 118 | id 119 | firstName 120 | lastName 121 | email 122 | phoneNumber 123 | address (id: 456) { 
124 | id 125 | street 126 | city 127 | state 128 | zip 129 | country 130 | } 131 | } 132 | }` 133 | ); 134 | 135 | // Variable values object 136 | const variables = { 137 | // Fake value for $userId variable 138 | userId: '123', 139 | }; 140 | const { proto, operationType } = extractAST( 141 | sampleAST, 142 | { 143 | cacheMetadata: true, 144 | cacheVariables: true, 145 | }, 146 | variables 147 | ); 148 | // console.log(JSON.stringify(proto, null, 2)); 149 | expect(proto.variableValues.user.userId).toBe('123'); 150 | }); 151 | 152 | test('should handle directives', () => { 153 | const sampleAST = parse(` 154 | { 155 | user @client { 156 | id 157 | name 158 | } 159 | } 160 | `); 161 | 162 | const { proto, operationType } = extractAST(sampleAST, { 163 | cacheMetadata: true, 164 | cacheVariables: true, 165 | }); 166 | 167 | // console.log(JSON.stringify(proto, null, 2)); 168 | expect(operationType).toBe('noBuns'); 169 | }); 170 | 171 | test('should handle fragment spreads', () => { 172 | const sampleAST = parse(` 173 | 174 | { 175 | user { 176 | ...userInfo 177 | address { 178 | id 179 | street 180 | } 181 | } 182 | } 183 | 184 | fragment userInfo on User { 185 | id 186 | name 187 | } 188 | `); 189 | 190 | const { proto, operationType } = extractAST(sampleAST, { 191 | cacheMetadata: false, 192 | cacheVariables: true, 193 | }); 194 | 195 | // console.log(JSON.stringify(proto, null, 2)); 196 | expect(proto.fields.user.name).toBe(true); 197 | expect(proto.fields.user.id).toBe(true); 198 | }); 199 | 200 | test('should correctly identify requireArguments to be false', () => { 201 | const sampleAST = parse(` 202 | { 203 | users { 204 | name 205 | age 206 | } 207 | } 208 | `); 209 | 210 | const { proto, operationType } = extractAST(sampleAST, { 211 | cacheMetadata: false, 212 | cacheVariables: true, 213 | requireArguments: false, 214 | }); 215 | 216 | console.log(JSON.stringify(proto, null, 2)); 217 | // console.log(operationType); 218 | expect(operationType).toBe('query'); 
219 | }); 220 | 221 | test('should handle subscription queries', () => { 222 | const sampleAST = parse(` 223 | subscription { 224 | userAdded { 225 | id 226 | name 227 | } 228 | } 229 | `); 230 | 231 | const { proto, operationType } = extractAST(sampleAST, { 232 | cacheMetadata: true, 233 | cacheVariables: true, 234 | }); 235 | 236 | // console.log(JSON.stringify(proto, null, 2)); 237 | expect(operationType).toBe('noBuns'); 238 | }); 239 | 240 | test('should require arguments', () => { 241 | const sampleAST = parse(` 242 | { 243 | users { 244 | id 245 | name 246 | age 247 | } 248 | } 249 | `); 250 | 251 | const { proto, operationType } = extractAST(sampleAST, { 252 | cacheMetadata: false, 253 | cacheVariables: true, 254 | requireArguments: true, 255 | }); 256 | // console.log(JSON.stringify(proto, null, 2)); 257 | expect(operationType).toBe('noArguments'); 258 | }); 259 | -------------------------------------------------------------------------------- /bunDL-client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "bundl-cache", 3 | "version": "1.0.6", 4 | "description": "bunDL is an intuitive, skinny GraphQL interceptor, that checks for cached data, handles mutations with PouchDB, and only sends modified or non-cached queries to the server. 
The returned data is then cached for future requests.", 5 | "main": "./src/bunCache.js", 6 | "files": [ 7 | "src/**/*", 8 | "package.json", 9 | "README.md" 10 | ], 11 | "scripts": { 12 | "start": "bun run ./src/bunCache.js" 13 | }, 14 | "repository": { 15 | "type": "git", 16 | "url": "git+https://github.com/oslabs-beta/BunDL.git#main" 17 | }, 18 | "author": "bunDL", 19 | "license": "ISC", 20 | "bugs": { 21 | "url": "https://github.com/oslabs-beta/BunDL/issues" 22 | }, 23 | "homepage": "https://github.com/oslabs-beta/BunDL/tree/main#readme", 24 | "dependencies": { 25 | "graphql": "15.8.0", 26 | "graphql-tools": "9.0.0", 27 | "lru-cache": "^10.0.1", 28 | "pouchdb": "^8.0.1", 29 | "pouchdb-adapter-idb": "^8.0.1", 30 | "pouchdb-browser": "^8.0.1" 31 | }, 32 | "keywords": [ 33 | "graphQL", 34 | "cache", 35 | "caching", 36 | "client-side", 37 | "bun", 38 | "query" 39 | ] 40 | } 41 | -------------------------------------------------------------------------------- /bunDL-client/src/bunCache.js: -------------------------------------------------------------------------------- 1 | import { parse } from 'graphql'; 2 | import extractAST from './helpers/extractAST.js'; 3 | import generateCacheKeys from './helpers/cacheKeys'; 4 | import { db } from './helpers/pouchHelpers.js'; 5 | import { LRUCache } from 'lru-cache'; 6 | import { 7 | generateGraphQLQuery, 8 | generateMissingLRUCachekeys, 9 | mergeGraphQLresponses, 10 | updateMissingCache, 11 | generateMissingPouchDBCachekeys, 12 | } from './helpers/queryHelpers.js'; 13 | 14 | const defaultConfig = { 15 | cacheMetadata: false, 16 | cacheVariables: true, 17 | requireArguments: true, 18 | }; 19 | 20 | export default class BunDL { 21 | constructor(maxSize = 100, userConfig = {}) { 22 | this.config = { ...defaultConfig, ...userConfig }; 23 | // Create a new LRU Cache instance 24 | this.cache = new LRUCache({ 25 | //specifies how many items can be in the cache 26 | max: maxSize, 27 | }); 28 | this.pouchDB = db; 29 | this.clientQuery = 
this.query.bind(this); 30 | this.fetchFromGraphQL = this.fetchFromGraphQL.bind(this); 31 | } 32 | 33 | async query(endPoint, query) { 34 | if (!query) { 35 | throw new Error('Query is undefined or empty: ', query); 36 | } 37 | 38 | const start = performance.now(); 39 | let end; 40 | let speed; 41 | const AST = parse(query); 42 | const { proto, operationType } = extractAST(AST, this.config); 43 | 44 | if (proto.operation === 'mutation') { 45 | this.cache.clear(); 46 | const mutationResults = await this.fetchFromGraphQL(query); 47 | return mutationResults; 48 | } 49 | 50 | if (operationType === 'noBuns' || operationType === 'noArguments') { 51 | const queryResults = await this.fetchFromGraphQL(endPoint, query); // 52 | end = performance.now(); 53 | let cachedata = { cache: 'hit', speed: end - start }; 54 | if (queryResults) { 55 | return { queryResults, cachedata }; 56 | } 57 | } 58 | 59 | //create the cache keys 60 | const cacheKeys = generateCacheKeys(proto); 61 | 62 | // check the LRU cache if this key already exists 63 | const { missingCacheKeys, graphQLcachedata } = generateMissingLRUCachekeys( 64 | cacheKeys, 65 | this.cache 66 | ); 67 | 68 | // if missing cache keys array has items, meaning LRU cache does not have all requested kery 69 | if (missingCacheKeys.length > 0) { 70 | //if pouch has some or any of missing cache keys 71 | 72 | const { updatedgraphQLcachedata, missingPouchCacheKeys } = 73 | await generateMissingPouchDBCachekeys(missingCacheKeys, graphQLcachedata, this.pouchDB); 74 | 75 | if (!missingPouchCacheKeys.length) { 76 | const updatedCacheKeys = updateMissingCache(updatedgraphQLcachedata, missingCacheKeys); 77 | 78 | for (const keys in updatedCacheKeys) { 79 | this.cache.set(keys, updatedCacheKeys[keys]); 80 | } 81 | end = performance.now(); 82 | speed = end - start; 83 | let cachedata = { cache: 'hit', speed: speed }; 84 | return { updatedgraphQLcachedata, cachedata }; 85 | } else { 86 | const graphQLquery = 
generateGraphQLQuery(missingPouchCacheKeys); 87 | 88 | const { returnObj, cachedata } = await this.fetchFromGraphQL(endPoint, graphQLquery); 89 | 90 | //update cachekeys from queryResults 91 | const updatedCacheKeys = updateMissingCache(returnObj, missingPouchCacheKeys); 92 | 93 | //update lru cache with queryresults 94 | for (const keys in updatedCacheKeys) { 95 | this.cache.set(keys, updatedCacheKeys[keys]); 96 | } 97 | 98 | //generate graphQL response from cache and merge response 99 | const newgraphql = mergeGraphQLresponses(updatedgraphQLcachedata, returnObj); 100 | return { newgraphql, cachedata }; 101 | } 102 | } 103 | end = performance.now(); 104 | speed = end - start; 105 | let cachedata = { cache: 'hit', speed: speed }; 106 | return { graphQLcachedata, cachedata }; 107 | } 108 | async fetchFromGraphQL(endPoint, query) { 109 | try { 110 | const response = await fetch(endPoint, { 111 | method: 'POST', 112 | body: JSON.stringify({ query: query }), 113 | headers: { 'Content-Type': 'application/json' }, 114 | }); 115 | if (!response.ok) { 116 | throw new Error(`HTTP error! 
Status: ${response.status}`); 117 | } 118 | return await response.json(); 119 | } catch (error) { 120 | console.error('Error during fetch:', error); 121 | throw error; 122 | } 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /bunDL-client/src/helpers/cacheKeys.js: -------------------------------------------------------------------------------- 1 | const generateCacheKeys = (proto) => { 2 | let resultKeys = []; 3 | 4 | const { fields, operation, variableValues } = proto; 5 | 6 | // Helper function to traverse nested fields 7 | const traverseFields = (fields, primaryType) => { 8 | for (const [field, value] of Object.entries(fields)) { 9 | // Check if it's a nested field and inspect the first key 10 | const firstKey = Object.keys(value)[0]; 11 | if (typeof value === 'object' && firstKey && firstKey.startsWith('$')) { 12 | // recursively call the nested fields 13 | // this will also set the new primaryType to be the field 14 | traverseFields(value, field); 15 | } else if (value === true) { 16 | // use the id to set the primaryType 17 | const idValue = variableValues[primaryType].id; 18 | // store the path into the resultKeys variable 19 | resultKeys.push(`${operation}:${primaryType}:${idValue}:${field}`); 20 | } 21 | } 22 | }; 23 | 24 | traverseFields(fields[proto.primaryQueryType], proto.primaryQueryType); 25 | 26 | return resultKeys; 27 | }; 28 | 29 | export default generateCacheKeys; 30 | -------------------------------------------------------------------------------- /bunDL-client/src/helpers/extractAST.js: -------------------------------------------------------------------------------- 1 | import { visit, BREAK } from 'graphql'; 2 | 3 | function extractAST(AST, config, variables = {}) { 4 | let operationType = ''; 5 | const setPath = []; 6 | const proto = { 7 | fields: {}, 8 | fragsDefinitions: {}, 9 | primaryQueryType: '', 10 | variableValues: {}, 11 | }; 12 | 13 | function setNestedProperty(obj, pathArray, 
value) { 14 | let current = obj; 15 | for (let i = 0; i < pathArray.length; i++) { 16 | const key = pathArray[i]; 17 | if (typeof current[key] === 'boolean' || !current[key]) { 18 | // If the key doesn't exist or it's a primitive, set it to an empty object 19 | current[key] = {}; 20 | } 21 | // If it's the last key in the path, set the value 22 | if (i === pathArray.length - 1) { 23 | if (typeof value === 'boolean') current[key] = value; 24 | else if (typeof value === 'object') current[key].subdata = value; 25 | else current[key] = value; 26 | } else { 27 | // Otherwise, traverse deeper 28 | current = current[key]; 29 | } 30 | } 31 | } 32 | 33 | let hasArguments = false; 34 | visit(AST, { 35 | Argument() { 36 | hasArguments = true; 37 | return BREAK; 38 | }, 39 | }); 40 | 41 | if (!hasArguments && config.requireArguments) { 42 | return { proto: null, operationType: 'noArguments' }; 43 | } 44 | 45 | visit(AST, { 46 | FragmentDefinition(node) { 47 | const fragName = node.name.value; 48 | proto.fragsDefinitions[fragName] = {}; 49 | for (const selections of node.selectionSet.selections) { 50 | if (selections.kind !== 'InlineFragment') { 51 | proto.fragsDefinitions[fragName][selections.name.value] = true; 52 | } 53 | } 54 | }, 55 | }); 56 | 57 | visit(AST, { 58 | enter(node) { 59 | //conditionals within queries (skip this field, or include this field) 60 | // @ symbol = directives in the discord example ken pasted: FetchUserData 61 | if (node.directives && node.directives.length > 0) { 62 | operationType = 'noBuns'; 63 | return BREAK; 64 | } 65 | }, 66 | OperationDefinition(node) { 67 | operationType = node.operation; 68 | proto.operation = operationType; 69 | 70 | if (node.selectionSet.selections[0].typeCondition) { 71 | proto.primaryQueryType = node.selectionSet.selections[0].typeCondition.name.value; 72 | } else { 73 | proto.primaryQueryType = node.selectionSet.selections[0].name.value; 74 | } 75 | 76 | if (operationType === 'subscription') { 77 | operationType = 
'noBuns'; 78 | return BREAK; 79 | } 80 | }, 81 | 82 | Variable(node, key, parent, path, ancestors) { 83 | if (!config.cacheVariables) { 84 | operationType = 'noBuns'; 85 | return BREAK; 86 | } 87 | 88 | let fieldName; 89 | if (ancestors[ancestors.length - 2].kind === 'Field') { 90 | fieldName = ancestors[ancestors.length - 2].name.value; 91 | } 92 | if (variables && fieldName) { 93 | for (let [key, value] of Object.entries(variables)) { 94 | proto.variableValues[fieldName] = proto.variableValues[fieldName] || {}; 95 | proto.variableValues[fieldName][key] = value; 96 | } 97 | } 98 | }, 99 | 100 | Argument(node, key, parent, path, ancestors) { 101 | function deepCheckArg(arg) { 102 | if (arg.kind === 'ObjectValue' || arg.kind === 'ListValue' || arg.kind === 'NullValue') { 103 | operationType = 'noBuns'; 104 | return BREAK; 105 | } else if (arg.kind === 'Variable' && config.cacheVariables) { 106 | return arg.name.value; 107 | } else { 108 | if (ancestors[ancestors.length - 1].kind === 'Field') { 109 | const fieldName = ancestors[ancestors.length - 1].name.value; 110 | proto.variableValues[fieldName] = proto.variableValues[fieldName] || {}; 111 | proto.variableValues[fieldName][node.name.value] = arg.value; 112 | } 113 | return arg.value; 114 | } 115 | } 116 | 117 | const argValue = deepCheckArg(node.value); 118 | setNestedProperty(proto.fields, [...setPath, '$' + node.name.value], argValue); 119 | }, 120 | 121 | Field: { 122 | enter(node, key, parent) { 123 | if (node.name.value.includes('__')) { 124 | operationType = 'noBuns'; 125 | return BREAK; 126 | } 127 | if (node.directives && node.directives.length) { 128 | operationType = 'noBuns'; 129 | return BREAK; 130 | } 131 | // Use the original field name as the key in proto structure 132 | const fieldSubdata = { 133 | // The actual field name as specified in the GraphQL query (e.g., "name" not "firstName") 134 | name: node.name.value, 135 | args: node.arguments 136 | ? 
// An array of arguments if they exist, otherwise an empty array. 137 | node.arguments.map((arg) => ({ 138 | // The name of the argument. 139 | name: arg.name.value, 140 | // The value of the argument. 141 | value: arg.value.value, 142 | })) 143 | : null, 144 | // The alias of the field if it exists, otherwise null. 145 | alias: node.alias ? node.alias.value : null, 146 | // Currently always set to null. Acting as a placeholder for now 147 | type: null, 148 | }; 149 | 150 | // Push to path based on alias if it exists or field name if it doesn't. 151 | const pathName = node.alias ? node.alias.value : node.name.value; 152 | setPath.push(pathName); 153 | 154 | if (config.cacheMetadata) { 155 | setNestedProperty(proto.fields, setPath, fieldSubdata); 156 | } else { 157 | setNestedProperty(proto.fields, setPath, true); 158 | } 159 | 160 | if (node.selectionSet) { 161 | for (const selection of node.selectionSet.selections) { 162 | if (selection.kind === 'FragmentSpread') { 163 | const fragmentFields = proto.fragsDefinitions[selection.name.value]; 164 | for (let fieldName in fragmentFields) { 165 | setNestedProperty(proto.fields, setPath.concat([fieldName]), true); 166 | } 167 | } 168 | } 169 | } 170 | }, 171 | leave() { 172 | setPath.pop(); 173 | }, 174 | }, 175 | 176 | SelectionSet: { 177 | enter(node, key, parent) { 178 | if (parent && !Array.isArray(parent) && parent.kind === 'Field') { 179 | const fieldsValues = {}; 180 | 181 | for (const field of node.selections) { 182 | if ( 183 | field.kind !== 'InlineFragment' && 184 | (field.kind === 'FragmentSpread' || !field.selectionSet) 185 | ) { 186 | fieldsValues[field.name.value] = true; 187 | } 188 | } 189 | } 190 | }, 191 | leave() { 192 | setPath.pop(); 193 | }, 194 | }, 195 | }); 196 | return { proto, operationType }; 197 | } 198 | 199 | export default extractAST; 200 | -------------------------------------------------------------------------------- /bunDL-client/src/helpers/pouchHelpers.js: 
-------------------------------------------------------------------------------- 1 | import pouchdb from 'pouchdb'; 2 | 3 | const db = new pouchdb('bundl-database'); 4 | 5 | let dbName = Bun.env.COUCHDB_DB_NAME; 6 | let pouchURL = Bun.env.POUCHDB_URL; 7 | let username = Bun.env.POUCHDB_USERNAME; 8 | let password = Bun.env.POUCHDB_PASSWORD; 9 | 10 | if (!dbName || !pouchURL || !username || !password) { 11 | try { 12 | const config = JSON.parse(await Bun.file('./config.json').text()); 13 | dbName = config.couchDBName; 14 | pouchURL = config.pouchURL; 15 | username = config.username; 16 | password = config.password; 17 | } catch (error) { 18 | console.error('no couchDB credentials found!'); 19 | } 20 | } 21 | if (dbName && pouchURL && username && password) { 22 | const remoteDB = new pouchdb(`${pouchURL}/${dbName}`, { 23 | auth: { 24 | username, 25 | password, 26 | }, 27 | }); 28 | const sync = db.sync(remoteDB, { live: true }); 29 | sync.on('error', function (err) { 30 | console.error('Sync Error', err); 31 | }); 32 | } 33 | 34 | export { db }; 35 | -------------------------------------------------------------------------------- /bunDL-client/src/helpers/queryHelpers.js: -------------------------------------------------------------------------------- 1 | const generateGraphQLQuery = (keys) => { 2 | const queryMap = {}; 3 | 4 | keys.forEach((key) => { 5 | const parts = key.split(':'); 6 | const typeName = parts[1]; 7 | const typeID = parts[2]; 8 | const field = parts.slice(3).join(':'); 9 | 10 | if (!queryMap[typeName]) { 11 | queryMap[typeName] = { 12 | id: typeID, 13 | fields: [], 14 | }; 15 | } 16 | queryMap[typeName].fields.push(field); 17 | }); 18 | 19 | const queries = Object.keys(queryMap).map((typeName) => { 20 | const type = queryMap[typeName]; 21 | const fields = type.fields.join('\n'); 22 | return `${typeName}(id: "${type.id}") { 23 | id 24 | ${fields} 25 | }`; 26 | }); 27 | 28 | let query = `query { 29 | ${queries.join('\n')} 30 | `; 31 | 32 | for (const 
keys in queryMap) { 33 | query += '}'; 34 | } 35 | 36 | return query; 37 | }; 38 | 39 | const generateMissingPouchDBCachekeys = async (cacheKeys, graphQLcachedata, localDB) => { 40 | const missingPouchCacheKeys = []; 41 | let data = graphQLcachedata.data; 42 | const docRequests = {}; 43 | 44 | cacheKeys.forEach((keys) => { 45 | const key = keys.split(':').slice(0, 3).join(':'); 46 | if (!docRequests[key]) docRequests[key] = []; 47 | docRequests[key].push(keys.split(':').slice(3).join('')); 48 | }); 49 | 50 | for (const key in docRequests) { 51 | const typeName = key.split(':').slice(1, 2).join(''); 52 | const id = key.split(':').slice(2).join(''); 53 | 54 | try { 55 | let doc = await localDB.get(id); 56 | if (doc) { 57 | const fields = docRequests[key]; 58 | fields.forEach((field) => { 59 | if (doc[field]) { 60 | data[typeName] = data[typeName] || {}; 61 | data[typeName][field] = doc[field]; 62 | } else { 63 | missingPouchCacheKeys.push(`${key}:${field}`); 64 | } 65 | }); 66 | } else { 67 | const fields = docRequests[key]; 68 | fields.forEach((field) => { 69 | missingPouchCacheKeys.push(`${key}:${field}`); 70 | }); 71 | } 72 | } catch (err) { 73 | console.log(err); 74 | } 75 | } 76 | 77 | const updatedgraphQLcachedata = data; 78 | 79 | return { updatedgraphQLcachedata, missingPouchCacheKeys }; 80 | }; 81 | 82 | const updatePouchDB = async (updatedCacheKeys, localDB) => { 83 | const obj = {}; 84 | 85 | for (const keys in updatedCacheKeys) { 86 | const key = keys.split(':').slice(0, 3).join(':'); 87 | const field = keys.split(':').slice(3).join(''); 88 | if (!obj[key]) { 89 | obj[key] = {}; 90 | } 91 | obj[key][field] = updatedCacheKeys[keys]; 92 | } 93 | 94 | for (const key in obj) { 95 | const fields = obj[key]; 96 | try { 97 | const id = key.split(':').slice(2).join(''); 98 | const doc = await localDB.get(id); 99 | 100 | if (doc) { 101 | let copy = { ...doc }; 102 | for (const field in fields) { 103 | copy[field] = fields[field]; 104 | } 105 | await 
localDB.put(copy); 106 | } else { 107 | await localDB.put(id, fields); 108 | } 109 | } catch (err) { 110 | console.log(err); 111 | } 112 | } 113 | }; 114 | 115 | const updateMissingCache = (queryResults, missingCacheKeys) => { 116 | const updatedCache = {}; 117 | const data = Object.values(queryResults)[0]; 118 | 119 | missingCacheKeys.forEach((cacheKey) => { 120 | const key = cacheKey.split(':'); 121 | const field = key.slice(3); 122 | field.forEach((eachField) => { 123 | if (data[eachField]) updatedCache[cacheKey] = data[eachField]; 124 | }); 125 | }); 126 | 127 | return updatedCache; 128 | }; 129 | 130 | const mergeGraphQLresponses = (obj1, obj2) => { 131 | const merged = { ...obj1 }; 132 | for (const key in obj2) { 133 | if (typeof obj2[key] === 'object' && obj1[key] && typeof obj1[key] === 'object') { 134 | merged[key] = mergeGraphQLresponses(obj1[key], obj2[key]); 135 | } else { 136 | merged[key] = obj2[key]; 137 | } 138 | } 139 | return merged; 140 | }; 141 | 142 | const generateMissingLRUCachekeys = (cacheKeys, LRUcache) => { 143 | const organizedKeys = {}; 144 | const graphQLcachedata = { 145 | data: {}, 146 | }; 147 | const cacheKeysInLRU = LRUcache.keys(); 148 | console.log('cacheKeysInLRU', cacheKeysInLRU); 149 | console.log(cacheKeys); 150 | //process the cache keys 151 | cacheKeys.forEach((key) => { 152 | // loop through each key and organize them by entity and ID 153 | // example: query:user:123:name = {user: {123: ['name']}} 154 | const [_, entityType, entityId, ...fields] = key.split(':'); 155 | // if the entity doesn't exist in our 'organizedKeys' object, then create it 156 | if (!organizedKeys[entityType]) { 157 | organizedKeys[entityType] = {}; 158 | console.log('organizedKeys', organizedKeys); 159 | } 160 | // create an array for the entityId if it doesn't exist as well 161 | if (!organizedKeys[entityType][entityId]) { 162 | organizedKeys[entityType][entityId] = []; 163 | } 164 | // append the fields of the current key to the entityId 165 | 
organizedKeys[entityType][entityId].push(fields.join(':')); 166 | console.log('organizedKeys2', organizedKeys); 167 | }); 168 | //recursively process each entity and its nested entities if they exist to create a GraphQL response structure 169 | const buildData = (entityType, entityId) => { 170 | // get the fields associated with this entity and ID 171 | const fields = organizedKeys[entityType][entityId]; 172 | const resultingValue = { id: entityId }; 173 | // console.log('result', JSON.parse(JSON.stringify(resultingValue))); 174 | 175 | console.log('fields', fields); 176 | console.log('resultingValue', resultingValue); 177 | 178 | // iterate over each field within the 'entityType' and 'entityId' ('user', '123') 179 | fields.forEach((field) => { 180 | // try fetching the value for this field within the LRU cache 181 | const value = LRUcache.get(`query:${entityType}:${entityId}:${field}`); 182 | console.log('lruCache: ', value); 183 | resultingValue[field] = value; 184 | console.log('result[field]', resultingValue[field]); 185 | }); 186 | return resultingValue; 187 | }; 188 | // split the first key and retrieve the top level type (i.e. 
query:user:123:name = 'user') 189 | const [_, topLevelEntity] = cacheKeys[0].split(':'); 190 | // grab the ID for the top-level entity 191 | console.log('organizedKeys[topLevelEntity]', organizedKeys[topLevelEntity]); 192 | const topLevelEntityId = Object.keys(organizedKeys[topLevelEntity])[0]; 193 | console.log('topLevelEntityId', topLevelEntityId); 194 | // invoke buildData with 'user' and '123' to recursively process assembling graphQL response data 195 | 196 | graphQLcachedata.data[topLevelEntity] = buildData(topLevelEntity, topLevelEntityId); 197 | console.log('graphQLcachedata.data[topLevelEntity]', graphQLcachedata.data[topLevelEntity]); 198 | // check if nested entities haven't been proccessed yet 199 | Object.keys(organizedKeys).forEach((entityType) => { 200 | // conditional to ensure that we're not reprocessing the top entity again 201 | console.log('entityType', entityType); 202 | if (entityType !== topLevelEntity) { 203 | const nestedEntityId = Object.keys(organizedKeys[entityType])[0]; 204 | console.log('nestedEntityId', nestedEntityId); 205 | // conditional to append nested entities to the top level entity 206 | if (!graphQLcachedata.data[topLevelEntity][entityType]) { 207 | graphQLcachedata.data[topLevelEntity][entityType] = buildData(entityType, nestedEntityId); 208 | console.log( 209 | 'graphQLcachedata.data[topLevelEntity][entityType]', 210 | graphQLcachedata.data[topLevelEntity][entityType] 211 | ); 212 | } 213 | } 214 | }); 215 | // filter out any cache keys that weren't present in the lru cache 216 | const missingCacheKeys = cacheKeys.filter((key) => !LRUcache.has(key)); 217 | 218 | // Return the missing cache keys and the updated GraphQL data 219 | return { missingCacheKeys, graphQLcachedata }; 220 | }; 221 | 222 | export { 223 | generateGraphQLQuery, 224 | generateMissingLRUCachekeys, 225 | mergeGraphQLresponses, 226 | updateMissingCache, 227 | generateMissingPouchDBCachekeys, 228 | updatePouchDB, 229 | }; 230 | 
-------------------------------------------------------------------------------- /bunDL-server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "bundl-server", 3 | "version": "1.0.4", 4 | "description": "bunDL is an intuitive, skinny GraphQL interceptor, that checks for cached data, handles mutations with PouchDB, and only sends modified or non-cached queries to the server. The returned data is then cached for future requests.", 5 | "main": "./src/bundl.js", 6 | "files": [ 7 | "src/**/*", 8 | "package.json", 9 | "README.md" 10 | ], 11 | "scripts": { 12 | "test": "echo \"Error: no test specified\" && exit 1", 13 | "redis": "redis-server --loadmodule ./RedisJSON/target/release/librejson.dylib " 14 | }, 15 | "repository": { 16 | "type": "git", 17 | "url": "git+https://github.com/oslabs-beta/BunDL.git#main" 18 | }, 19 | "author": "bunDL", 20 | "license": "ISC", 21 | "bugs": { 22 | "url": "https://github.com/oslabs-beta/BunDL/issues" 23 | }, 24 | "homepage": "https://github.com/oslabs-beta/BunDL/tree/main#readme", 25 | "devDependencies": { 26 | "bun-types": "latest" 27 | }, 28 | "dependencies": { 29 | "graphql": "15.8.0", 30 | "graphql-tools": "9.0.0", 31 | "ioredis": "^5.3.2", 32 | "ioredis-rejson": "^1.0.10", 33 | "redis": "^4.6.10" 34 | }, 35 | "keywords": [ 36 | "graphQL", 37 | "cache", 38 | "caching", 39 | "server-side", 40 | "bun", 41 | "query" 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /bunDL-server/src/__test__/functest.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Merges specified fields from a source object into a target object, recursively handling nested objects. 3 | * Only the fields that are specified in the target object will be merged from the source object. 4 | * @param {Object} proto - The object specifying the structure and fields to be merged from redisData. 
5 | * @param {Object} redisData - The source object from which data will be merged. 6 | * @returns {Object} - The resultant object after merging specified fields from redisData. 7 | */ 8 | function handleCacheHit(proto, redisData, start) { 9 | const end = performance.now(); 10 | const speed = end - start; 11 | console.log('🐇 Data retrieved from Redis Cache 🐇'); 12 | console.log('🐇 cachespeed', speed, ' 🐇'); 13 | const cachedata = { cache: 'hit', speed: end - start }; 14 | 15 | const returnObj = deepAssign({ ...proto.fields }, redisData); 16 | return { returnObj, cachedata }; 17 | } 18 | 19 | /** 20 | * Recursively merges properties from the source object into the target object, but only if they are specified in the target object. 21 | * @param {Object} target - The object into which properties will be merged. 22 | * @param {Object} source - The object from which properties will be merged. 23 | * @returns {Object} - The target object after merging. 24 | */ 25 | const deepAssign = (target, source) => { 26 | for (const key in target) { 27 | if (target.hasOwnProperty(key)) { 28 | if ( 29 | Object.prototype.toString.call(target[key]) === '[object Object]' && 30 | Object.prototype.toString.call(source[key]) === '[object Object]' 31 | ) { 32 | target[key] = deepAssign(target[key], source[key]); 33 | } else if (source.hasOwnProperty(key)) { 34 | target[key] = source[key]; 35 | } 36 | } 37 | } 38 | return target; 39 | }; 40 | -------------------------------------------------------------------------------- /bunDL-server/src/bundl.js: -------------------------------------------------------------------------------- 1 | import { graphql } from 'graphql'; 2 | import interceptQueryAndParse from './helpers/intercept-and-parse-logic'; 3 | import extractAST from './helpers/prototype-logic'; 4 | import { extractIdFromQuery } from './helpers/queryObjectFunctions'; 5 | import redisCacheMain from './helpers/redisConnection'; 6 | 7 | const defaultConfig = { 8 | cacheVariables: true, 9 | 
cacheMetadata: false, 10 | requireArguments: false, 11 | }; 12 | 13 | export default class BunDL { 14 | constructor({ schema, cacheExpiration, redisPort, redisHost, userConfig }) { 15 | this.config = { ...defaultConfig, ...userConfig }; 16 | this.schema = schema; 17 | this.cacheExpiration = cacheExpiration; 18 | this.redisPort = redisPort; 19 | this.redisHost = redisHost; 20 | this.redisCache = redisCacheMain; 21 | this.query = this.query.bind(this); 22 | this.mergeObjects = this.mergeObjects.bind(this); 23 | this.handleCacheHit = this.handleCacheHit.bind(this); 24 | this.handleCacheMiss = this.handleCacheMiss.bind(this); 25 | this.storeDocuments = this.storeDocuments.bind(this); 26 | this.insertRedisKey = this.insertRedisKey.bind(this); 27 | this.deepAssign = this.deepAssign.bind(this); 28 | } 29 | 30 | // Initialize your class properties here using the parameters 31 | 32 | async query(request) { 33 | try { 34 | const data = await request.json(); 35 | request.body.query = data.query; 36 | const redisKey = extractIdFromQuery(request.body.query); 37 | const start = performance.now(); 38 | const { AST, sanitizedQuery, variableValues } = await interceptQueryAndParse( 39 | request.body.query 40 | ); 41 | const obj = extractAST(AST, this.config, variableValues); 42 | const { proto, operationType, operationMutation } = obj; 43 | 44 | if (operationMutation) { 45 | const mutationResults = await graphql(this.schema, sanitizedQuery); 46 | 47 | this.clearRedisCache(request); 48 | 49 | if (redisKey) { 50 | await this.redisCache.json_set(redisKey, '$', mutationResults); 51 | } 52 | 53 | return mutationResults; 54 | } 55 | 56 | if (operationType === 'noBuns') { 57 | const queryResults = await graphql(this.schema, sanitizedQuery); 58 | return queryResults; 59 | } else if (redisKey) { 60 | let redisData = await this.redisCache.json_get(redisKey); 61 | console.log('redisdata', redisData); 62 | if (redisData) { 63 | return this.handleCacheHit(proto, redisData, start); 64 | } else { 
65 | return this.handleCacheMiss(proto, start, redisKey); 66 | } 67 | } else if (!redisKey) { 68 | const queryResults = await graphql(this.schema, sanitizedQuery); 69 | console.log('queryresults test', queryResults); 70 | const key = Object.keys(queryResults.data); 71 | const doc = Object.values(queryResults.data); 72 | const docObj = Object.assign({}, doc); 73 | this.storeDocuments(docObj); 74 | console.log('returnobj: ', queryResults.returnObj); 75 | return queryResults; 76 | } else { 77 | return this.handleCacheMiss(proto, start, redisKey); 78 | } 79 | } catch (error) { 80 | console.error('GraphQL Error:', error); 81 | return { 82 | log: error.message, 83 | status: 400, 84 | message: { err: 'GraphQL query Error' }, 85 | }; 86 | } 87 | } 88 | 89 | /** 90 | * Merges specified fields from a source object into a target object, recursively handling nested objects. 91 | * Only the fields that are specified in the target object will be merged from the source object. 92 | * @param {Object} proto - The object specifying the structure and fields to be merged from redisData. 93 | * @param {Object} redisData - The source object from which data will be merged. 94 | * @returns {Object} - The resultant object after merging specified fields from redisData. 95 | */ 96 | handleCacheHit(proto, redisData, start) { 97 | const end = performance.now(); 98 | const speed = end - start; 99 | console.log('🐇 Data retrieved from Redis Cache 🐇'); 100 | console.log('🐇 cachespeed', speed, ' 🐇'); 101 | const cachedata = { cache: 'hit', speed: end - start }; 102 | const returnObj = this.deepAssign({ ...proto.fields }, redisData); 103 | return { returnObj, cachedata }; 104 | } 105 | 106 | /** 107 | * Recursively merges properties from the source object into the target object, but only if they are specified in the target object. 108 | * @param {Object} target - The object into which properties will be merged. 109 | * @param {Object} source - The object from which properties will be merged. 
110 | * @returns {Object} - The target object after merging. 111 | */ 112 | deepAssign(target, source) { 113 | for (const key in target) { 114 | if (target.hasOwnProperty(key)) { 115 | if ( 116 | Object.prototype.toString.call(target[key]) === '[object Object]' && 117 | Object.prototype.toString.call(source[key]) === '[object Object]' 118 | ) { 119 | target[key] = this.deepAssign(target[key], source[key]); 120 | } else if (source.hasOwnProperty(key)) { 121 | target[key] = source[key]; 122 | } 123 | } 124 | } 125 | return target; 126 | } 127 | 128 | async handleCacheMiss(proto, start, redisKey) { 129 | const fullDocQuery = this.insertRedisKey(process.env.QUERY, redisKey); 130 | const fullDocData = (await graphql(this.schema, fullDocQuery)).data; 131 | await this.redisCache.json_set(redisKey, '$', fullDocData); 132 | const returnObj = { ...proto.fields }; 133 | 134 | for (const field in returnObj.user) { 135 | returnObj.user[field] = fullDocData.user[field]; 136 | } 137 | const end = performance.now(); 138 | const speed = end - start; 139 | console.log('🐢 Data retrieved without Cache Results', speed, ' 🐢'); 140 | const cachedata = { cache: 'miss', speed: end - start }; 141 | return { returnObj, cachedata }; 142 | } 143 | 144 | clearRedisCache(request) { 145 | this.redisCache.flushall(); 146 | return; 147 | } 148 | 149 | mergeObjects(templateObj, data, mergeObject) { 150 | // Split recursive call into helper function 151 | const performMerge = (tempObj, dataObj, mergeObj) => { 152 | for (const key in mergeObj) { 153 | if (Object.prototype.hasOwnProperty.call(mergeObj, key)) { 154 | if (dataObj[key] !== undefined) { 155 | if (typeof dataObj[key] === 'object' && dataObj[key] !== null) { 156 | mergeObj[key] = performMerge(tempObj[key], dataObj[key], mergeObj[key] || {}); 157 | } else { 158 | mergeObj[key] = dataObj[key]; 159 | } 160 | } 161 | } 162 | } 163 | return mergeObj; 164 | }; 165 | const result = performMerge(templateObj, data, mergeObject); 166 | return result; 
167 | } 168 | 169 | storeDocuments(array) { 170 | array.forEach((document) => { 171 | this.redisCache.json_set(document.id, '$', { user: document }); 172 | }); 173 | } 174 | 175 | insertRedisKey(query, redisKey) { 176 | const index = query.indexOf('id:'); // Find the index of "id:" 177 | if (index === -1) { 178 | throw new Error('Query string does not contain "id:"'); 179 | } 180 | const before = query.substring(0, index + 4); // Extract the substring before and including "id:" 181 | const after = query.substring(index + 4); // Extract the substring after "id:" 182 | return `${before}"${redisKey}"${after}`; // Insert the redisKey in between 183 | } 184 | 185 | // partial queries: 186 | // if user is querying the same id: but some of the wanted values are null -> 187 | // iterate through the object - 188 | 189 | // * This is the closing bracket for the whole class! 190 | } 191 | -------------------------------------------------------------------------------- /bunDL-server/src/helpers/caching-logic.js: -------------------------------------------------------------------------------- 1 | import { getFromRedis, writeToCache } from './redisHelper'; 2 | 3 | const checkCache = async (redisKey) => { 4 | //create cache key by stringifying the proto 5 | let cachedResult; 6 | 7 | // retrieve data from getfromredis passing in cachekey 8 | const cachedData = await getFromRedis(redisKey); 9 | 10 | //if cachedData exists 11 | if (cachedData) { 12 | //turns result back to object 13 | cachedResult = JSON.parse(cachedData); 14 | 15 | //return cached result 16 | } 17 | return cachedResult; 18 | }; 19 | 20 | export default checkCache; 21 | 22 | // check redis cache for current key we want 23 | // 24 | -------------------------------------------------------------------------------- /bunDL-server/src/helpers/intercept-and-parse-logic.js: -------------------------------------------------------------------------------- 1 | const { parse } = require('graphql'); 2 | 3 | const 
interceptQueryAndParse = async (request) => { 4 | // Check if there's a query in the request body and it's a string 5 | 6 | // * done? todo: review and refactor 7 | 8 | if (!request || typeof request !== 'string') { 9 | throw new Error('No query found on request body or query is not a string.'); 10 | } 11 | 12 | // You can enhance this further as needed. 13 | 14 | const sanitizedQuery = request; 15 | 16 | let AST; 17 | try { 18 | // Parse the sanitized query to produce the AST 19 | AST = parse(sanitizedQuery); 20 | } catch (error) { 21 | throw new Error('Error parsing the GraphQL query: ' + error.message); 22 | } 23 | 24 | // currently NOT USED: variableValues -- potential use case: dynamic variables, but static variables want to store as keys (marker) 25 | const variableValues = request.variables || {}; 26 | // Return the AST and sanitized query 27 | return { AST, sanitizedQuery, variableValues }; 28 | }; 29 | 30 | export default interceptQueryAndParse; 31 | -------------------------------------------------------------------------------- /bunDL-server/src/helpers/pouchdbHelpers.js: -------------------------------------------------------------------------------- 1 | //puchdb has to be in the server to make the call to couchdb to sync with couchdb 2 | 3 | const PouchDB = require('pouchdb'); 4 | const db = new PouchDB('users'); 5 | 6 | const storeResultsInPouchDB = (queryResults) => { 7 | console.log(queryResults.users); 8 | const dataWithID = queryResults.users.map((user, index) => { 9 | return { 10 | _id: `user_${index}`, 11 | ...user, 12 | }; 13 | }); 14 | 15 | const updatedResults = { 16 | data: { 17 | users: dataWithID, 18 | }, 19 | }; 20 | 21 | console.log(updatedResults); 22 | 23 | if (updatedResults.users.length === 1) { 24 | db.put(user) 25 | .then((response) => { 26 | console.log('Document inserted successfully', response); 27 | }) 28 | .catch((error) => { 29 | console.log('Error inserting document', error); 30 | }); 31 | } else { 32 | 
import { visit, BREAK } from 'graphql';

/**
 * Traverses a parsed GraphQL AST and extracts caching metadata:
 *  - proto: object mirroring the requested field hierarchy (leaves are `true`,
 *    or field-metadata objects when config.cacheMetadata is set)
 *  - operationType: the operation kind, or 'noBuns' when the operation cannot
 *    be cached (directives, subscriptions, __ meta fields, variables with
 *    caching disabled, object/null/list argument values)
 *  - operationMutation: true when the operation is a mutation
 *  - fragsDefinitions / primaryQueryType / fragmentType / variableValues
 *
 * @param {Object} AST - AST produced by graphql's parse().
 * @param {Object} config - flags: cacheVariables, cacheMetadata, requireArguments.
 * @param {Object} [variables={}] - runtime variable values for the operation.
 */
function extractAST(AST, config, variables = {}) {
  let operationType = '';
  let operationMutation = false;
  const setPath = [];
  const proto = {
    fields: {},
  };
  let fragsDefinitions = {};
  let primaryQueryType = '';
  let fragmentType = ''; // NOTE(review): never assigned anywhere; kept so the return shape is unchanged
  let variableValues = {};

  // Writes `value` into `obj` at the nested location named by pathArray,
  // creating intermediate objects along the way.
  function setNestedProperty(obj, pathArray, value) {
    let current = obj;
    for (let i = 0; i < pathArray.length; i++) {
      const key = pathArray[i];
      if (typeof current[key] === 'boolean' || !current[key]) {
        // Key missing or a primitive: promote it to an object so we can descend.
        current[key] = {};
      }
      if (i === pathArray.length - 1) {
        // Last segment: write the value (objects go under .subdata).
        if (typeof value === 'boolean') current[key] = value;
        else if (typeof value === 'object') current[key].subdata = value;
        else current[key] = value;
      } else {
        current = current[key];
      }
    }
  }

  // Pass 1: record the flat field list of every named fragment definition.
  visit(AST, {
    FragmentDefinition(node) {
      const fragName = node.name.value;
      fragsDefinitions[fragName] = {};
      for (const selections of node.selectionSet.selections) {
        if (selections.kind !== 'InlineFragment') {
          fragsDefinitions[fragName][selections.name.value] = true;
        }
      }
    },
  });

  // Pass 2: detect whether the operation carries any arguments at all.
  let hasArguments = false;
  visit(AST, {
    Argument() {
      hasArguments = true;
      return BREAK;
    },
  });

  if (!hasArguments && config.requireArguments) {
    return { proto: null, operationType: 'noBuns' };
  }

  // Pass 3: build proto and classify the operation.
  visit(AST, {
    enter(node) {
      // Directives (@skip/@include etc.) make results conditional — don't cache.
      if (node.directives && node.directives.length > 0) {
        operationType = 'noBuns';
        return BREAK;
      }
    },
    OperationDefinition(node) {
      operationType = node.operation;
      if (operationType === 'mutation') {
        operationMutation = true;
      }

      if (node.selectionSet.selections[0].typeCondition) {
        primaryQueryType = node.selectionSet.selections[0].typeCondition.name.value;
      } else {
        primaryQueryType = node.selectionSet.selections[0].name.value;
      }

      // Subscriptions are long-lived streams — never cached.
      if (operationType === 'subscription') {
        operationType = 'noBuns';
        return BREAK;
      }
    },

    Variable(node, key, parent, path, ancestors) {
      if (!config.cacheVariables) {
        operationType = 'noBuns';
        return BREAK;
      }

      let fieldName;
      if (ancestors[ancestors.length - 2].kind === 'Field') {
        fieldName = ancestors[ancestors.length - 2].name.value;
      }
      if (variables && fieldName) {
        for (let [varName, value] of Object.entries(variables)) {
          variableValues[fieldName] = variableValues[fieldName] || {};
          variableValues[fieldName][varName] = value;
        }
      }
    },

    Argument(node, key, parent, path, ancestors) {
      // Inspects an argument value; returns BREAK for uncacheable kinds.
      function deepCheckArg(arg) {
        if (arg.kind === 'ObjectValue' || arg.kind === 'NullValue' || arg.kind === 'ListValue') {
          operationType = 'noBuns';
          return BREAK;
        } else if (arg.kind === 'Variable' && config.cacheVariables) {
          return arg.name.value;
        } else {
          if (ancestors[ancestors.length - 1].kind === 'Field') {
            const fieldName = ancestors[ancestors.length - 1].name.value;
            variableValues[fieldName] = variableValues[fieldName] || {};
            variableValues[fieldName][node.name.value] = arg.value;
          }
          return arg.value;
        }
      }
      // BUG FIX: the original discarded deepCheckArg's return value, so BREAK
      // never reached the visitor and traversal continued. Propagate BREAK
      // only — returning any other value would REPLACE the AST node.
      if (deepCheckArg(node.value) === BREAK) return BREAK;
    },

    Field: {
      enter(node, key, parent) {
        // __typename / introspection fields are not cacheable.
        if (node.name.value.includes('__')) {
          operationType = 'noBuns';
          return BREAK;
        }
        if (node.directives && node.directives.length) {
          operationType = 'noBuns';
          return BREAK;
        }
        // Metadata describing this field, stored when config.cacheMetadata is on.
        const fieldSubdata = {
          // The actual field name as written in the query (not its alias).
          name: node.name.value,
          args: node.arguments
            ? node.arguments.map((arg) => ({
                name: arg.name.value,
                value: arg.value.value,
              }))
            : null,
          alias: node.alias ? node.alias.value : null,
          type: null, // placeholder for future type resolution
        };

        // Path segment is the alias when present, otherwise the field name.
        const pathName = node.alias ? node.alias.value : node.name.value;
        setPath.push(pathName);

        if (config.cacheMetadata) {
          setNestedProperty(proto.fields, setPath, fieldSubdata);
        } else {
          setNestedProperty(proto.fields, setPath, true);
        }

        // Expand fragment spreads into their concrete fields.
        if (node.selectionSet) {
          for (const selection of node.selectionSet.selections) {
            if (selection.kind === 'FragmentSpread') {
              const fragmentFields = fragsDefinitions[selection.name.value];
              for (let fieldName in fragmentFields) {
                setNestedProperty(proto.fields, setPath.concat([fieldName]), true);
              }
            }
          }
        }
      },
      leave() {
        setPath.pop();
      },
    },

    // BUG FIX: the original SelectionSet visitor popped setPath on leave even
    // though only Field.enter pushes to it, so a sibling field following a
    // nested field was written to the wrong path (double pop). Its enter()
    // only built locals that were never used, so the visitor is removed.
  });
  return {
    proto,
    operationType,
    operationMutation,
    fragsDefinitions,
    primaryQueryType,
    fragmentType,
    variableValues,
  };
}

export default extractAST;
4 | * @param {Object} proto 5 | * @returns object 6 | */ 7 | 8 | //======== proto reducer ============ 9 | const filterOutCachedResults = function (proto) { 10 | const dbQueryObj = {}; 11 | for (const key in proto) { 12 | const reducedProto = this.extractFalseValueKeys(proto[key]); 13 | if (reducedProto.length > 0) dbQueryObj[key] = reducedProto; 14 | } 15 | return dbQueryObj; 16 | }; 17 | 18 | /** 19 | * Iteratively and recursively extracts keys with `false` values from an object, including those within nested objects. Returns an array containing keys associated with `false` values and objects for nested content. 20 | * 21 | * @param {Object} proto - The object from which to extract keys. 22 | * @returns {Array} An array of extracted keys and nested objects. 23 | */ 24 | 25 | //============= build Item ================= 26 | const extractFalseValueKeys = function (proto) { 27 | const fields = []; 28 | for (const key in proto) { 29 | if (proto[key] === false) fields.push(key); 30 | if (typeof proto[key] === 'object') { 31 | const nestedObj = {}; 32 | const reducedProto = this.extractFalseValueKeys(proto[key]); 33 | if (reducedProto.length > 0) { 34 | nestedObj[key] = reducedProto; 35 | fields.push(nestedObj); 36 | } 37 | } 38 | } 39 | return fields; 40 | }; 41 | 42 | /** 43 | * Converts a given query object into a properly formatted GraphQL query string. 44 | * If the query object contains strings, they are added directly. If it contains arrays or nested objects, the function processes them recursively to build the query string. 45 | * @param {Object} queryObject - The object to be converted into a GraphQL query string. 46 | * @returns {string} A GraphQL formatted query string. 
47 | */ 48 | 49 | //==============create query string=================== 50 | const convertQueryObjectToString = function (queryObject) { 51 | const stringifyQuery = (item) => { 52 | if (typeof item === 'string') { 53 | return item; 54 | } 55 | if (Array.isArray(item)) { 56 | return item.map(stringifyQuery).join(' '); 57 | } 58 | return Object.entries(item) 59 | .map(([key, value]) => `${key} { ${this.stringifyQuery(value)} }`) 60 | .join(' '); 61 | }; 62 | return `{ ${stringifyQuery(queryObject)} }`; 63 | }; 64 | 65 | const convertGraphQLQueryToObject = function (queryString, redisKey) { 66 | const lines = queryString.trim().split(/\n/); 67 | const stack = []; 68 | let currentObject = {}; 69 | let root = currentObject; 70 | 71 | lines.forEach((line) => { 72 | line = line.trim(); 73 | if (line.endsWith('{')) { 74 | const key = line.slice(0, -1).trim(); 75 | const newObj = {}; 76 | currentObject[key] = newObj; 77 | stack.push(currentObject); 78 | currentObject = newObj; 79 | } else if (line === '}') { 80 | currentObject = stack.pop(); 81 | } else { 82 | const key = line.trim(); 83 | currentObject[key] = null; 84 | } 85 | }); 86 | 87 | // const test = Object.keys(root); 88 | const test = { ...{ ...root } }; 89 | 90 | return root; 91 | }; 92 | 93 | // !==========================================================// 94 | //============ join responses ==============// 95 | const joinResponses = async function (cachedArray, uncachedArray) { 96 | const joinedArray = []; 97 | for (let i = 0; i < uncachedArray.lengt; i++) { 98 | const joinedItem = await this.recursiveJoin(cachedArray[i], uncachedArray[i]); 99 | joinedArray.push(joinedItem); 100 | } 101 | return joinedArray; 102 | }; 103 | 104 | const recursiveJoin = async function (cachedItem, uncachedItem) { 105 | const joinedObject = { ...cachedItem }; 106 | for (const field in uncachedItem) { 107 | if (Array.isArray(uncachedItem[field])) { 108 | if (typeof uncachedItem[field][0] === 'string') { 109 | const temp = await 
Promise.all( 110 | uncachedItem[field].map((refernce) => this.fetchItemFromCache(reference)) 111 | ); 112 | uncachedItem[field] = temp; 113 | } 114 | joinedObject[field] = cachedItem[field] 115 | ? await this.joinResponses(cachedItem[field], uncachedItem[field]) 116 | : uncachedItem[field]; 117 | } else { 118 | joinedObject[field] = uncachedItem[field]; 119 | } 120 | } 121 | return joinedObject; 122 | }; 123 | 124 | const extractIdFromQuery = (queryString) => { 125 | const regex = /id:\s*"([^"]+)"/; 126 | const match = queryString.match(regex); 127 | return match ? match[1] : null; 128 | }; 129 | 130 | export { 131 | filterOutCachedResults, 132 | extractFalseValueKeys, 133 | convertQueryObjectToString, 134 | extractIdFromQuery, 135 | convertGraphQLQueryToObject, 136 | }; 137 | -------------------------------------------------------------------------------- /bunDL-server/src/helpers/redisConnection.js: -------------------------------------------------------------------------------- 1 | // import Redis from 'ioredis'; 2 | import RedisReJSON from 'ioredis-rejson'; 3 | // import Redis from 'ioredis-rejson'; 4 | 5 | const redisPort = Number(process.env.REDIS_PORT); 6 | const redisHost = process.env.REDIS_HOST; 7 | const redisPassword = process.env.REDIS_PASSWORD; 8 | 9 | const redisCacheMain = new RedisReJSON({ 10 | host: redisHost, 11 | port: redisPort, 12 | // password: redisPassword, 13 | }); 14 | 15 | redisCacheMain.on('error', (error) => { 16 | console.error(`Error when trying to connect to redisCacheMain: ${error}`); 17 | }); 18 | 19 | redisCacheMain.on('connect', () => { 20 | console.log('Connected to redisCacheMain'); 21 | }); 22 | 23 | export default redisCacheMain; 24 | -------------------------------------------------------------------------------- /bunDL-server/src/helpers/redisHelper.js: -------------------------------------------------------------------------------- 1 | import redisCacheMain from './redisConnection'; 2 | 3 | // connection to Redis server 4 
/**
 * Fetches a JSON value from Redis by key.
 * @param {string} key - Redis key; non-string keys are ignored (returns undefined).
 * @returns {Promise<*>} the stored JSON value, or undefined on miss/error.
 */
const getFromRedis = async (key) => {
  // Guard: only string keys are valid Redis keys.
  if (typeof key !== 'string') return;
  try {
    const redisResult = await redisCacheMain.json_get(key);
    return redisResult;
  } catch (error) {
    // BUG FIX: the original built an error object here and silently dropped
    // it, swallowing every failure. Log it so failures are visible.
    console.error(`Error in RedisCache trying to getFromRedis, ${error}`);
    return undefined;
  }
};

/**
 * Writes a value to Redis, optionally with a TTL.
 * @param {string} key - Redis key.
 * @param {*} value - value to store.
 * @param {?number} [expireTime=null] - TTL in seconds; no expiry when null.
 */
const writeToCache = async (key, value, expireTime = null) => {
  try {
    await redisCacheMain.set(key, value);
    // Apply a TTL only when the caller asked for one.
    if (expireTime) {
      await redisCacheMain.expire(key, expireTime);
    }
  } catch (error) {
    // BUG FIX: same silent-swallow pattern as getFromRedis — log the failure.
    console.error(`Error in RedisCache trying to setToRedis, ${error}`);
  }
};

export { getFromRedis, writeToCache };

/* get key names from Redis -> add them to the response
   @inputs: req res next (express) */
export const getRedisKeys = (req, res, next) => {
  redisCacheMain
    .keys('*')
    .then((response) => {
      res.locals.redisKeys = response;
      return next();
    })
    .catch((error) => {
      const err = {
        log: `Error occurred in getRedisKeys function: ${error}`,
        status: 400,
        message: {
          err: 'Error occurred in getRedisKeys',
        },
      };
      return next(err);
    });
};

/* get values associated with keys from Redis
   @inputs: req res next (express) */
export const getRedisValues = (req, res, next) => {
  const keys = res.locals.redisKeys;
  // Nothing to look up — succeed immediately with an empty result set.
  if (!keys || keys.length === 0) {
    res.locals.redisValues = [];
    return next();
  }
  // mget fetches multiple values for a list of keys in one round trip.
  redisCacheMain
    .mget(keys)
    .then((response) => {
      res.locals.redisValues = response;
      return next();
    })
    .catch((error) => {
      const err = {
        log: `error occurred in getRedisvalues, ${error}`,
        status: 400,
        message: {
          err: 'Error in redis = getRedisValues, Check server logs',
        },
      };
      return next(err);
    });
};

/**
 * Builds the Express middleware chain for inspecting Redis contents.
 * @param {{getKeys?: boolean, getValues?: boolean}} [options] - which data to retrieve.
 * @returns {Array<Function>} middleware: keys only, or keys + values.
 */
export const getRedisInfo = (
  options = {
    getKeys: true,
    getValues: true,
  }
) => {
  const middleware = [];

  // Maps the option flags onto the middleware chain the caller asked for.
  const getOptions = (opts) => {
    const { getKeys, getValues } = opts;
    if (getKeys && !getValues) return 'getKeysOnly';
    // Any other combination falls through to the full chain (keys are always
    // needed to fetch values).
    else return 'getKeysAndValues';
  };

  switch (getOptions(options)) {
    case 'getKeysOnly':
      middleware.push(getRedisKeys);
      break;
    case 'getKeysAndValues':
      middleware.push(getRedisKeys, getRedisValues);
      break;
  }

  return middleware;
};