├── .dockerignore ├── .gitignore ├── .prettierrc ├── .template.env ├── .vscode └── settings.json ├── Dockerfile ├── README.md ├── cachemunk ├── .npmignore ├── README.md ├── cachemunk-1.0.0.tgz ├── package-lock.json ├── package.json ├── src │ └── cache.ts └── tsconfig.json ├── client ├── public │ └── images │ │ ├── circle_logo.png │ │ ├── happy_cachemunk_png_transparent.png │ │ ├── image-1.png │ │ └── sad_cachemunk_png_transparent.png ├── src │ ├── App.tsx │ ├── components │ │ ├── CacheMetricsChart.tsx │ │ ├── CacheStatus.tsx │ │ ├── CacheSwitch.tsx │ │ ├── ClearCacheButton.tsx │ │ ├── CustomInsertQuery.tsx │ │ ├── CustomSelectQuery.tsx │ │ ├── FrequencyDistribution.tsx │ │ ├── FrequencyDistribution2.tsx │ │ ├── Header.tsx │ │ ├── QueryBox.tsx │ │ ├── QueryResultBox.tsx │ │ ├── ResponseTimeChart.tsx │ │ ├── SubmitButton.tsx │ │ ├── SummaryBarChart.tsx │ │ └── SummaryContainer.tsx │ ├── data │ │ ├── responseTimes-cache.json │ │ └── responseTimes-no-cache.json │ ├── index.html │ ├── index.tsx │ ├── stylesheets │ │ └── styles.css │ └── theme.ts ├── tsconfig.json └── webpack.config.js ├── compose.yml ├── eslint.config.js ├── package-lock.json ├── package.json ├── psql-cities-data └── world.sql ├── server ├── src │ ├── analytics.ts │ ├── benchmarks │ │ ├── benchmark.ts │ │ ├── benchmarkWrite.ts │ │ └── mock │ │ │ ├── data.ts │ │ │ └── generator.ts │ ├── cache │ │ ├── cache.ts │ │ └── redisClient.ts │ ├── controllers │ │ ├── cacheSize.ts │ │ ├── cachingController.ts │ │ ├── deleteCache.ts │ │ ├── dynamicController.ts │ │ ├── errorHandling.ts │ │ └── insertCity.ts │ ├── db.ts │ ├── queries │ │ ├── queries.ts │ │ └── queriesMap.ts │ ├── routers │ │ ├── cacheRouter.ts │ │ ├── dataRouter.ts │ │ └── no-cacheRouter.ts │ ├── server.ts │ └── util │ │ ├── savedStats │ │ ├── responseTimes-cache.json │ │ └── responseTimes-no-cache.json │ │ ├── stats.ts │ │ └── timing.ts └── tsconfig.json └── test ├── src ├── cache.test.ts ├── data.test.ts ├── stats.test.ts └── timing.test.ts └── 
tsconfig.json /.dockerignore: -------------------------------------------------------------------------------- 1 | .env -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | compose-test.yml 2 | Dockerfile-test 3 | server_test.js 4 | compose-test copy.yml 5 | 6 | # Dependency directories 7 | node_modules/ 8 | /cachemunk/node_modules/ 9 | 10 | # Environment configuration files 11 | .env 12 | .env.local 13 | .env.development.local 14 | .env.test.local 15 | .env.production.local 16 | 17 | # Logs 18 | logs 19 | *.log 20 | npm-debug.log* 21 | yarn-debug.log* 22 | yarn-error.log* 23 | 24 | # Runtime data 25 | pids 26 | *.pid 27 | *.seed 28 | *.pid.lock 29 | 30 | # Compiled files 31 | /dist 32 | /build 33 | /out 34 | /server/dist 35 | /client/dist 36 | /test/dist 37 | /cachemunk/dist 38 | 39 | # TypeScript compiled files 40 | *.tsbuildinfo 41 | 42 | # Optional eslint cache 43 | .eslintcache 44 | 45 | # zip file for initial aws deployment 46 | *.zip 47 | 48 | # jest coverage 49 | coverage/ 50 | 51 | # Miscellaneous 52 | .DS_Store 53 | Thumbs.db 54 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "jsxSingleQuote": true, 4 | "trailingComma": "all", 5 | "printWidth": 100, 6 | "tabWidth": 2, 7 | "semi": true, 8 | "bracketSpacing": true, 9 | "arrowParens": "always" 10 | } 11 | -------------------------------------------------------------------------------- /.template.env: -------------------------------------------------------------------------------- 1 | # Local test instance of redis 2 | REDIS_HOST="127.0.0.1" 3 | REDIS_PORT=6379 4 | 5 | # Local test instance of PostgreSQL 6 | PG_HOST="" 7 | PG_PORT=5432 8 | PG_USER="" 9 | PG_PASSWORD="" 10 | PG_DB="" 11 | 12 | # # AWS RDS connection 
information 13 | # PG_HOST="" 14 | # PG_PORT=5432 15 | # PG_USER="" 16 | # PG_PASSWORD="" 17 | # PG_DB="" -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "eslint.experimental.useFlatConfig": true, 3 | "editor.defaultFormatter": "esbenp.prettier-vscode", 4 | "editor.formatOnSave": true 5 | } 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Use the official Node.js slim image as the base 2 | FROM node:slim 3 | 4 | # Set the working directory inside the container 5 | WORKDIR /app 6 | 7 | # Copy package.json and package-lock.json to the working directory 8 | COPY package*.json ./ 9 | 10 | # Install the application dependencies 11 | RUN npm ci 12 | 13 | # Copy the application code to the working directory 14 | COPY . . 15 | 16 | # Expose the port on which the application will run (replace 3000 with your app's port) 17 | EXPOSE 3030 18 | 19 | # Define the command to run the application 20 | CMD ["npm", "start"] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CacheMunk 2 | ![CacheMunk Banner](https://cachemunk-readme.s3.amazonaws.com/cachemunk_logo_banner.png) 3 | Application layer caching middleware library for Node.js and Redis. 4 | ## 1. Description 5 | Efficiently abstracts Redis to cache PostgreSQL query results at the application layer, enhancing Node.js applications with submillisecond latency. 6 | ## 2. Dependencies 7 | - **ioredis:** robust, performant, flexible Redis client for Node.js 8 | - **snappy:** fast data compression and decompression library, optimized for speed with a reasonable compression ratio 9 | ## 3. 
Features 10 | - **Redis Integration**: Utilizes `ioredis` for Redis interactions, supporting both string and buffer data types. 11 | - **Data Compression**: Implements data compression using `snappy` to reduce storage and bandwidth usage. 12 | - **Configurable Options**: Allows setting of default TTL, maximum entry size, and custom event handlers for cache hits and misses. 13 | - **Performance Monitoring**: Measures execution times for caching operations using high-resolution timestamps. 14 | - **Layered Caching**: Features a first-level cache with a JavaScript `Map` for ultra-fast data retrieval. 15 | - **Data Consistency**: Uses Redis transactions (pipelining) to ensure data integrity across multiple operations. 16 | - **Cache Invalidation**: Provides methods to invalidate cache based on dependencies to handle stale data. 17 | - **Error Handling**: Includes robust error management and size checks to prevent exceeding maximum entry size. 18 | - **Cache Management Tools**: Offers functions to clear the cache, measure its size, and count the number of string keys. 19 | 20 | ## 4. Prerequisite: Install and Connect a Redis Server 21 | 22 | If not already installed on your server, install Redis OSS (Redis Open Source Software) 23 | 24 | - macOS using Homebrew: 25 | - At the terminal, type `brew install redis` 26 | - After installation completes, type `redis-server` 27 | - Your server should now have a Redis database connection open (note the port on which it is listening) 28 | - See more detailed instructions in the [Redis docs: Install Redis OSS on macOS](https://redis.io/docs/latest/operate/oss_and_stack/install/install-redis/install-redis-on-mac-os/) 29 | - Ubuntu Linux : 30 | - You can install recent stable versions of Redis from the official packages.redis.io APT repository. 
31 | - Add the repository to the apt index, update it, and then install: 32 | ``` 33 | curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg 34 | 35 | echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list 36 | 37 | sudo apt-get update 38 | sudo apt-get install redis 39 | ``` 40 | - After installation completes, run the following command to start the server 41 | ``` 42 | sudo systemctl start redis-server 43 | ``` 44 | - Your server should now have a Redis database connection open. You can use the command below to see a detailed status report (note the port on which it is listening) 45 | ``` 46 | sudo systemctl status redis-server 47 | ``` 48 | ## 6. Usage 49 | ### Install CacheMunk 50 | Install the cachemunk library using npm. `ioredis` and `snappy` dependencies will be installed if needed. 51 | ``` 52 | npm install cachemunk 53 | ``` 54 | ### Import the Library and `ioredis` 55 | ``` 56 | import { Redis } from 'ioredis'; 57 | import { configureCache } from 'cachemunk'; 58 | ``` 59 | ### Instantiate Redis 60 | ``` 61 | const redis = new Redis({ 62 | host: '127.0.0.1', // modify as needed or pass in as env variable 63 | port: 6379 // modify as needed or pass in as env variable 64 | }); 65 | 66 | ``` 67 | ### Configure the Cache 68 | ``` 69 | const cache = configureCache({ 70 | redis, 71 | defaultTtl: 3600, // optional, defaults to 3600 seconds (1 hour) 72 | maxEntrySize: 5000000, // optional, defaults to 5MB 73 | onCacheHit: (queryKey, executionTime) => { 74 | console.log(`Cache hit for key: ${queryKey} in ${executionTime}ms`); 75 | }, 76 | onCacheMiss: (queryKey, executionTime) => { 77 | console.log(`Cache miss for key: ${queryKey} in ${executionTime}ms`); 78 | } 79 | }); 80 | ``` 81 | 82 | - **redis (required)** 83 | - **Type**: `Redis` 84 | - **Description**: The instance of ioredis to use for 
caching. 85 | 86 | - **defaultTtl (optional)** 87 | - **Type**: `number` 88 | - **Default**: `3600` (1 hour) 89 | - **Description**: The default time-to-live (TTL) for cache entries in seconds. 90 | 91 | - **maxEntrySize (optional)** 92 | - **Type**: `number` 93 | - **Default**: `5000000` (5 MB) 94 | - **Description**: The maximum size of a cache entry in bytes. Entries larger than this will not be cached. 95 | 96 | - **onCacheHit (optional)** 97 | - **Type**: `EventHandler` 98 | - **Description**: A callback function to handle cache hits. It receives the query key and execution time in milliseconds as parameters. 99 | 100 | - **onCacheMiss (optional)** 101 | - **Type**: `EventHandler` 102 | - **Description**: A callback function to handle cache misses. It receives the query key and execution time in milliseconds as parameters. 103 | 104 | 105 | ## Functions 106 | 107 | configureCache() is a factory function that returns the following methods for you to use wherever needed. 108 | 109 | #### `set(queryKey: string, data: string | Buffer, dependencies: string[], ttlInSeconds?: number): Promise` 110 | Adds a query result to the cache. 111 | 112 | - **queryKey:** The key under which the data is stored. 113 | - **data:** The data to be cached, either as a string or a Buffer. 114 | - **dependencies:** An array of dependencies associated with this cache entry. 115 | - **ttlInSeconds:** Time-to-live for the cache entry in seconds (optional, defaults to the configured defaultTtl). 116 | 117 | #### `get(queryKey: string): Promise` 118 | Retrieves a cached query result. 119 | 120 | - **queryKey:** The key of the cached data to retrieve. 121 | - **Returns:** The cached data as a string, or null if the data is not found. 122 | 123 | #### `invalidate(dependency: string): Promise` 124 | Invalidates cache entries based on a dependency. 125 | 126 | - **dependency:** The dependency key whose associated cache entries need to be invalidated. 
127 | 128 | #### `clear(): Promise` 129 | Clears the entire cache for the current Redis database. 130 | 131 | #### `getSize(): Promise` 132 | Returns the total number of keys in the current Redis database. 133 | 134 | #### `getStringKeySize(): Promise` 135 | Returns the number of string keys in the current Redis database. 136 | 137 | ## Integrating with PostgreSQL 138 | 139 | To integrate with PostgreSQL, create your Pool instance and write your own wrapper functions that call functions from the cachemunk library to set, get and invalidate query results. Here are some simple examples. 140 | 141 | ### Create a new Pool Instance 142 | 143 | ``` 144 | import { Pool } from 'pg'; 145 | 146 | const pool = new Pool({ 147 | user: 'postgres', // Replace with your PostgreSQL user 148 | host: 'localhost', // Replace with your PostgreSQL host 149 | database: 'mydatabase', // Replace with your PostgreSQL database 150 | password: 'mypassword', // Replace with your PostgreSQL password 151 | port: 5432, // Replace with your PostgreSQL port 152 | }); 153 | ``` 154 | 155 | ### Define a query function 156 | 157 | ``` 158 | export const query = async (text, params) => { 159 | try { 160 | const result = await pool.query(text, params); 161 | return result; 162 | } catch (err) { 163 | console.error(`Error executing query ${text}:`, err); 164 | throw err; // Re-throw the error after logging it 165 | } 166 | }; 167 | ``` 168 | 169 | ### Define a getData() function that uses cachemunk `get` and `set` 170 | 171 | ``` 172 | // Define getData function 173 | const getData = async (queryKey, queryText, dependencies) => { 174 | // Check the cache 175 | const cachedResult = await cache.get(queryKey); 176 | if (cachedResult) { 177 | console.log('Cache hit'); 178 | return JSON.parse(cachedResult); 179 | } 180 | 181 | // Query the database if not cached 182 | const result = await query(queryText); 183 | 184 | // Cache the result 185 | await cache.set(queryKey, JSON.stringify(result.rows), 
dependencies); 186 | 187 | console.log('Cache miss'); 188 | return result.rows; 189 | }; 190 | ``` 191 | 192 | ### Define an insertData() function that uses cachemunk `invalidate` 193 | 194 | ``` 195 | // Define insertData function with cache invalidation 196 | const insertData = async (insertText, dependencies) => { 197 | // Execute the insert query 198 | try { 199 | const result = await query(insertText); 200 | 201 | // Invalidate the cache for the specific dependencies 202 | await cache.invalidate(dependencies); 203 | 204 | console.log('Cache invalidated for dependencies:', dependencies); 205 | return result; 206 | } catch (err) { 207 | console.error(`Error executing insert ${insertText}:`, err); 208 | throw err; // Re-throw the error after logging it 209 | } 210 | }; 211 | 212 | ``` 213 | 214 | ## Example - Test Factory Functions with Redis Only 215 | 216 | You can copy, paste, and run the code the code below to see these functions in action! 217 | 218 | ``` 219 | import { Redis } from 'ioredis'; 220 | import { configureCache } from 'cachemunk'; 221 | 222 | const redis = new Redis({ 223 | host: '127.0.0.1', // modify as needed or pass in as env variable 224 | port: 6379 // modify as needed or pass in as env variable 225 | }); 226 | 227 | const cache = configureCache({ 228 | redis, 229 | defaultTtl: 3600, 230 | maxEntrySize: 5000000, 231 | onCacheHit: (queryKey, executionTime) => { 232 | console.log(`Cache hit for key: ${queryKey} in ${executionTime}ms`); 233 | }, 234 | onCacheMiss: (queryKey, executionTime) => { 235 | console.log(`Cache miss for key: ${queryKey} in ${executionTime}ms`); 236 | } 237 | }); 238 | 239 | async function testCache() { 240 | try { 241 | console.log('\n===================='); 242 | console.log('Testing cache.set and cache.get'); 243 | console.log('====================\n'); 244 | 245 | // Call the set function with a queryKey, data, and an array of dependencies 246 | await cache.set('testKey', 'testValue', []); 247 | 248 | // Call the 
get function to retrieve the cached value 249 | let value = await cache.get('testKey'); 250 | console.log("\ntestKey:", value); // Should print 'testValue' 251 | 252 | console.log('\n===================='); 253 | console.log('Testing cache.invalidate'); 254 | console.log('====================\n'); 255 | 256 | // Call the set function with a queryKey, data, and an array of dependencies 257 | await cache.set('testKey2', 'testValue to be invalidated', ['testDependency']); 258 | 259 | // Call the get function to retrieve the cached value 260 | value = await cache.get('testKey2'); 261 | console.log("\ntestKey2 before invalidation:", value); // Should print 'testValue to be invalidated' 262 | 263 | // Invalidate cache for a dependency 264 | await cache.invalidate('testDependency'); 265 | 266 | // Introduce a 50ms delay to clear L1 Cache 267 | await new Promise(resolve => setTimeout(resolve, 50)); 268 | 269 | // Call the get function to retrieve the invalidated value 270 | value = await cache.get('testKey2'); 271 | console.log("\ntestKey2 after invalidation:", value); // Should print null 272 | 273 | console.log('\n===================='); 274 | console.log('Adding additional keys to cache'); 275 | console.log('====================\n'); 276 | 277 | // Introduce a 50ms delay to clear L1 Cache 278 | await new Promise(resolve => setTimeout(resolve, 50)); 279 | 280 | 281 | console.log('\n===================='); 282 | console.log('Testing cache.clear'); 283 | console.log('====================\n'); 284 | 285 | // Clear the entire cache 286 | await cache.clear(); 287 | 288 | // Attempt to get the value after clearing cache 289 | value = await cache.get('testKey'); 290 | console.log("\ntestKey after clear:", value); // Should print null 291 | 292 | console.log('\n===================='); 293 | console.log('Testing cache.getSize'); 294 | console.log('====================\n'); 295 | 296 | // Add two additional keys to the cache 297 | await cache.set('additionalKey1', 'value 1', 
['testDependency']); 298 | await cache.set('additionalKey2', 'value 2', []); 299 | 300 | // Check the size of the cache 301 | const size = await cache.getSize(); 302 | console.log("\nCache size:", size); // Should print the size of the cache (number of keys) 303 | 304 | console.log('\n===================='); 305 | console.log('Testing cache.getStringKeySize'); 306 | console.log('====================\n'); 307 | 308 | // Check the size of the cache for string keys 309 | const stringKeySize = await cache.getStringKeySize(); 310 | console.log("\nString key size:", stringKeySize); // Should print the number of string keys in the cache 311 | 312 | // Exit the process 313 | process.exit(0); 314 | } catch (error) { 315 | console.error('\n===================='); 316 | console.error('Error:', error); 317 | console.error('====================\n'); 318 | // Exit with error code 319 | process.exit(1); 320 | } 321 | } 322 | 323 | testCache(); 324 | 325 | ``` 326 | 327 | ## Example - Test Factory Functions with Redis and PostgreSQL 328 | 329 | ### Cachemunk Integrated with PostgreSQL 330 | 331 | Copy, paste, and run the code the code below to see the library in action, integrated with PostgreSQL and Redis. It assumes you have a Redis server and PostgreSQL server running locally with a database called "mydatabase". 
332 | 333 | ``` 334 | import pkg from 'pg'; 335 | const { Pool } = pkg; 336 | import { Redis } from 'ioredis'; 337 | import { configureCache } from 'cachemunk'; 338 | 339 | // Configure Redis 340 | const redis = new Redis({ 341 | host: '127.0.0.1', // Replace with your Redis host 342 | port: 6379 // Replace with your Redis port 343 | }); 344 | 345 | // Configure Cachemunk 346 | const cache = configureCache({ 347 | redis, 348 | defaultTtl: 3600, 349 | maxEntrySize: 5000000, 350 | onCacheHit: (queryKey, executionTime) => { 351 | console.log(`Cache hit for key: ${queryKey} in ${executionTime}ms`); 352 | }, 353 | onCacheMiss: (queryKey, executionTime) => { 354 | console.log(`Cache miss for key: ${queryKey} in ${executionTime}ms`); 355 | } 356 | }); 357 | 358 | // Create a new Pool instance 359 | const pool = new Pool({ 360 | user: 'postgres', // Replace with your PostgreSQL user 361 | host: 'localhost', // Replace with your PostgreSQL host 362 | database: 'mydatabase', // Replace with your PostgreSQL database 363 | password: 'mypassword', // Replace with your PostgreSQL password 364 | port: 5432, // Replace with your PostgreSQL port 365 | }); 366 | 367 | // Define a query function 368 | const query = async (text) => { 369 | try { 370 | const result = await pool.query(text); 371 | return result; 372 | } catch (err) { 373 | console.error(`Error executing query ${text}:`, err); 374 | throw err; // Re-throw the error after logging it 375 | } 376 | }; 377 | 378 | // Define getData function 379 | const getData = async (queryKey, queryText, dependencies) => { 380 | // Check the cache 381 | await delay(50); // Delay before interacting with cache 382 | const cachedResult = await cache.get(queryKey); 383 | if (cachedResult) { 384 | console.log('Cache hit'); 385 | return JSON.parse(cachedResult); 386 | } 387 | 388 | // Query the database if not cached 389 | const result = await query(queryText); 390 | 391 | // Cache the result 392 | await delay(50); // Delay before interacting 
with cache 393 | await cache.set(queryKey, JSON.stringify(result.rows), dependencies); 394 | 395 | console.log('Cache miss'); 396 | return result.rows; 397 | }; 398 | 399 | // Define insertData function with cache invalidation 400 | const insertData = async (insertText, dependencies) => { 401 | // Execute the insert query 402 | try { 403 | const result = await query(insertText); 404 | 405 | // Invalidate the cache for the specific dependencies 406 | await delay(50); // Delay before interacting with cache 407 | await cache.invalidate(dependencies); 408 | 409 | console.log('Cache invalidated for dependencies:', dependencies); 410 | return result; 411 | } catch (err) { 412 | console.error(`Error executing insert ${insertText}:`, err); 413 | throw err; // Re-throw the error after logging it 414 | } 415 | }; 416 | 417 | // Define a function to fetch the entire table 418 | const fetchAllData = async () => { 419 | try { 420 | const result = await query('SELECT * FROM mytable'); 421 | return result.rows; 422 | } catch (err) { 423 | console.error('Error fetching all data:', err); 424 | throw err; 425 | } 426 | }; 427 | 428 | // Define a function to setup the database 429 | const setupDatabase = async () => { 430 | try { 431 | // Drop the table if it exists 432 | await query('DROP TABLE IF EXISTS mytable'); 433 | 434 | // Drop the sequence if it exists to avoid duplicate key issues 435 | await query('DROP SEQUENCE IF EXISTS mytable_id_seq'); 436 | 437 | // Create the table 438 | await query(` 439 | CREATE TABLE mytable ( 440 | id SERIAL PRIMARY KEY, 441 | name VARCHAR(255) NOT NULL 442 | ); 443 | `); 444 | 445 | // Insert initial dummy data 446 | await query("INSERT INTO mytable (name) VALUES ('First Entry')"); 447 | await query("INSERT INTO mytable (name) VALUES ('Second Entry')"); 448 | await query("INSERT INTO mytable (name) VALUES ('Third Entry')"); 449 | } catch (err) { 450 | console.error('Error setting up the database:', err); 451 | throw err; 452 | } 453 | }; 454 | 
455 | // Define a function to clear all cache 456 | const clearCache = async () => { 457 | try { 458 | await cache.clear(); 459 | console.log('All cache keys cleared.'); 460 | } catch (err) { 461 | console.error('Error clearing cache:', err); 462 | } 463 | }; 464 | 465 | // Delay function to wait for a specified number of milliseconds 466 | const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); 467 | 468 | const main = async () => { 469 | try { 470 | // Setup the database 471 | await setupDatabase(); 472 | 473 | // Insert data and invalidate the cache 474 | const insertText1 = "INSERT INTO mytable (name) VALUES ('New Entry 1')"; 475 | const dependencies = ['dependency1']; 476 | 477 | // Delay before interacting with cache 478 | await delay(50); 479 | await insertData(insertText1, dependencies); 480 | 481 | // Delay before interacting with cache 482 | await delay(50); 483 | // Get data (should query the database and cache the result) 484 | const queryKey = 'myQueryKey'; 485 | const queryText = 'SELECT * FROM mytable'; 486 | let data = await getData(queryKey, queryText, dependencies); 487 | console.log('Data:', data); 488 | 489 | // Delay before interacting with cache 490 | await delay(50); 491 | // Perform a cache hit 492 | data = await getData(queryKey, queryText, dependencies); 493 | console.log('Data (cache hit):', data); 494 | 495 | // Delay before interacting with cache 496 | await delay(50); 497 | // Insert more data and invalidate the cache 498 | const insertText2 = "INSERT INTO mytable (name) VALUES ('New Entry 2')"; 499 | await insertData(insertText2, dependencies); 500 | 501 | // Delay before interacting with cache 502 | await delay(50); 503 | // Get data again (should query the database again due to invalidation) 504 | const dataAfterInsert = await getData(queryKey, queryText, dependencies); 505 | console.log('Data after insert:', dataAfterInsert); 506 | 507 | // Delay before interacting with cache 508 | await delay(50); 509 | // Fetch 
and show the entire table 510 | const allData = await fetchAllData(); 511 | console.log('All data in mytable:', allData); 512 | 513 | // Clear all cache keys 514 | console.log('Clearing all cache keys...'); 515 | await clearCache(); 516 | 517 | // Verify cache is cleared by attempting to get data 518 | console.log('Verifying cache is cleared...'); 519 | data = await getData(queryKey, queryText, dependencies); 520 | console.log('Data after cache clear:', data); 521 | 522 | } catch (error) { 523 | console.error('Error:', error); 524 | } 525 | }; 526 | 527 | main(); 528 | ``` 529 | ## Issues 530 | 531 | ## Contributors 532 | ![Olivia Carlisle](https://github.com/oliviacarlisle.png?size=100) 533 | 534 | Olivia Carlisle 535 | Github: [@oliviacarlisle](https://github.com/oliviacarlisle) 536 | 537 | ![Jayan Pillai](https://github.com/jrpillai.png?size=100) 538 | 539 | Jayan Pillai 540 | Github: [@jrpillai](https://github.com/jrpillai) 541 | 542 | ![Nick Angelopoulous](https://github.com/nickangel7.png?size=100) 543 | 544 | Nick Angelopoulous 545 | Github: [@nickangel7](https://github.com/nickangel7) 546 | 547 | ![Amy YQ Jiang](https://github.com/yj776.png?size=100) 548 | 549 | Amy YQ Jiang 550 | Github: [@yj776](https://github.com/yj776) 551 | -------------------------------------------------------------------------------- /cachemunk/.npmignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/CacheMunk/5fb97164d77057c8401aa65ebc9bb7c2c5033fee/cachemunk/.npmignore -------------------------------------------------------------------------------- /cachemunk/README.md: -------------------------------------------------------------------------------- 1 | # CacheMunk 2 | ![CacheMunk Banner](https://cachemunk-readme.s3.amazonaws.com/cachemunk_logo_banner.png) 3 | Application layer caching middleware library for Node.js and Redis. 4 | ## 1. 
Description 5 | Efficiently abstracts Redis to cache PostgreSQL query results at the application layer, enhancing Node.js applications with submillisecond latency. 6 | ## 2. Dependencies 7 | - **ioredis:** robust, performant, flexible Redis client for Node.js 8 | - **snappy:** fast data compression and decompression library, optimized for speed with a reasonable compression ratio 9 | ## 3. Features 10 | - **Redis Integration**: Utilizes `ioredis` for Redis interactions, supporting both string and buffer data types. 11 | - **Data Compression**: Implements data compression using `snappy` to reduce storage and bandwidth usage. 12 | - **Configurable Options**: Allows setting of default TTL, maximum entry size, and custom event handlers for cache hits and misses. 13 | - **Performance Monitoring**: Measures execution times for caching operations using high-resolution timestamps. 14 | - **Layered Caching**: Features a first-level cache with a JavaScript `Map` for ultra-fast data retrieval. 15 | - **Data Consistency**: Uses Redis transactions (pipelining) to ensure data integrity across multiple operations. 16 | - **Cache Invalidation**: Provides methods to invalidate cache based on dependencies to handle stale data. 17 | - **Error Handling**: Includes robust error management and size checks to prevent exceeding maximum entry size. 18 | - **Cache Management Tools**: Offers functions to clear the cache, measure its size, and count the number of string keys. 19 | 20 | ## 4. 
Prerequisite: Install and Connect a Redis Server 21 | 22 | If not already installed on your server, install Redis OSS (Redis Open Source Software) 23 | 24 | - macOS using Homebrew: 25 | - At the terminal, type `brew install redis` 26 | - After installation completes, type `redis-server` 27 | - Your server should now have a Redis database connection open (note the port on which it is listening) 28 | - See more detailed instructions in the [Redis docs: Install Redis OSS on macOS](https://redis.io/docs/latest/operate/oss_and_stack/install/install-redis/install-redis-on-mac-os/) 29 | - Ubuntu Linux : 30 | - You can install recent stable versions of Redis from the official packages.redis.io APT repository. 31 | - Add the repository to the apt index, update it, and then install: 32 | ``` 33 | curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg 34 | 35 | echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list 36 | 37 | sudo apt-get update 38 | sudo apt-get install redis 39 | ``` 40 | - After installation completes, run the following command to start the server 41 | ``` 42 | sudo systemctl start redis-server 43 | ``` 44 | - Your server should now have a Redis database connection open. You can use the command below to see a detailed status report (note the port on which it is listening) 45 | ``` 46 | sudo systemctl status redis-server 47 | ``` 48 | ## 6. Usage 49 | ### Install CacheMunk 50 | Install the cachemunk library using npm. `ioredis` and `snappy` dependencies will be installed if needed. 
51 | ``` 52 | npm install cachemunk 53 | ``` 54 | ### Import the Library and `ioredis` 55 | ``` 56 | import { Redis } from 'ioredis'; 57 | import { configureCache } from 'cachemunk'; 58 | ``` 59 | ### Instantiate Redis 60 | ``` 61 | const redis = new Redis({ 62 | host: '127.0.0.1', // modify as needed or pass in as env variable 63 | port: 6379 // modify as needed or pass in as env variable 64 | }); 65 | 66 | ``` 67 | ### Configure the Cache 68 | ``` 69 | const cache = configureCache({ 70 | redis, 71 | defaultTtl: 3600, // optional, defaults to 3600 seconds (1 hour) 72 | maxEntrySize: 5000000, // optional, defaults to 5MB 73 | onCacheHit: (queryKey, executionTime) => { 74 | console.log(`Cache hit for key: ${queryKey} in ${executionTime}ms`); 75 | }, 76 | onCacheMiss: (queryKey, executionTime) => { 77 | console.log(`Cache miss for key: ${queryKey} in ${executionTime}ms`); 78 | } 79 | }); 80 | ``` 81 | 82 | - **redis (required)** 83 | - **Type**: `Redis` 84 | - **Description**: The instance of ioredis to use for caching. 85 | 86 | - **defaultTtl (optional)** 87 | - **Type**: `number` 88 | - **Default**: `3600` (1 hour) 89 | - **Description**: The default time-to-live (TTL) for cache entries in seconds. 90 | 91 | - **maxEntrySize (optional)** 92 | - **Type**: `number` 93 | - **Default**: `5000000` (5 MB) 94 | - **Description**: The maximum size of a cache entry in bytes. Entries larger than this will not be cached. 95 | 96 | - **onCacheHit (optional)** 97 | - **Type**: `EventHandler` 98 | - **Description**: A callback function to handle cache hits. It receives the query key and execution time in milliseconds as parameters. 99 | 100 | - **onCacheMiss (optional)** 101 | - **Type**: `EventHandler` 102 | - **Description**: A callback function to handle cache misses. It receives the query key and execution time in milliseconds as parameters. 
103 | 104 | 105 | ## Functions 106 | 107 | configureCache() is a factory function that returns the following methods for you to use wherever needed. 108 | 109 | #### `set(queryKey: string, data: string | Buffer, dependencies: string[], ttlInSeconds?: number): Promise` 110 | Adds a query result to the cache. 111 | 112 | - **queryKey:** The key under which the data is stored. 113 | - **data:** The data to be cached, either as a string or a Buffer. 114 | - **dependencies:** An array of dependencies associated with this cache entry. 115 | - **ttlInSeconds:** Time-to-live for the cache entry in seconds (optional, defaults to the configured defaultTtl). 116 | 117 | #### `get(queryKey: string): Promise` 118 | Retrieves a cached query result. 119 | 120 | - **queryKey:** The key of the cached data to retrieve. 121 | - **Returns:** The cached data as a string, or null if the data is not found. 122 | 123 | #### `invalidate(dependency: string): Promise` 124 | Invalidates cache entries based on a dependency. 125 | 126 | - **dependency:** The dependency key whose associated cache entries need to be invalidated. 127 | 128 | #### `clear(): Promise` 129 | Clears the entire cache for the current Redis database. 130 | 131 | #### `getSize(): Promise` 132 | Returns the total number of keys in the current Redis database. 133 | 134 | #### `getStringKeySize(): Promise` 135 | Returns the number of string keys in the current Redis database. 136 | 137 | ## Integrating with PostgreSQL 138 | 139 | To integrate with PostgreSQL, create your Pool instance and write your own wrapper functions that call functions from the cachemunk library to set, get and invalidate query results. Here are some simple examples. 
140 | 141 | ### Create a new Pool Instance 142 | 143 | ``` 144 | import { Pool } from 'pg'; 145 | 146 | const pool = new Pool({ 147 | user: 'postgres', // Replace with your PostgreSQL user 148 | host: 'localhost', // Replace with your PostgreSQL host 149 | database: 'mydatabase', // Replace with your PostgreSQL database 150 | password: 'mypassword', // Replace with your PostgreSQL password 151 | port: 5432, // Replace with your PostgreSQL port 152 | }); 153 | ``` 154 | 155 | ### Define a query function 156 | 157 | ``` 158 | export const query = async (text, params) => { 159 | try { 160 | const result = await pool.query(text, params); 161 | return result; 162 | } catch (err) { 163 | console.error(`Error executing query ${text}:`, err); 164 | throw err; // Re-throw the error after logging it 165 | } 166 | }; 167 | ``` 168 | 169 | ### Define a getData() function that uses cachemunk `get` and `set` 170 | 171 | ``` 172 | // Define getData function 173 | const getData = async (queryKey, queryText, dependencies) => { 174 | // Check the cache 175 | const cachedResult = await cache.get(queryKey); 176 | if (cachedResult) { 177 | console.log('Cache hit'); 178 | return JSON.parse(cachedResult); 179 | } 180 | 181 | // Query the database if not cached 182 | const result = await query(queryText); 183 | 184 | // Cache the result 185 | await cache.set(queryKey, JSON.stringify(result.rows), dependencies); 186 | 187 | console.log('Cache miss'); 188 | return result.rows; 189 | }; 190 | ``` 191 | 192 | ### Define an insertData() function that uses cachemunk `invalidate` 193 | 194 | ``` 195 | // Define insertData function with cache invalidation 196 | const insertData = async (insertText, dependencies) => { 197 | // Execute the insert query 198 | try { 199 | const result = await query(insertText); 200 | 201 | // Invalidate the cache for the specific dependencies 202 | await cache.invalidate(dependencies); 203 | 204 | console.log('Cache invalidated for dependencies:', dependencies); 205 
| return result; 206 | } catch (err) { 207 | console.error(`Error executing insert ${insertText}:`, err); 208 | throw err; // Re-throw the error after logging it 209 | } 210 | }; 211 | 212 | ``` 213 | 214 | ## Example - Test Factory Functions with Redis Only 215 | 216 | You can copy, paste, and run the code below to see these functions in action! 217 | 218 | ``` 219 | import { Redis } from 'ioredis'; 220 | import { configureCache } from 'cachemunk'; 221 | 222 | const redis = new Redis({ 223 | host: '127.0.0.1', // modify as needed or pass in as env variable 224 | port: 6379 // modify as needed or pass in as env variable 225 | }); 226 | 227 | const cache = configureCache({ 228 | redis, 229 | defaultTtl: 3600, 230 | maxEntrySize: 5000000, 231 | onCacheHit: (queryKey, executionTime) => { 232 | console.log(`Cache hit for key: ${queryKey} in ${executionTime}ms`); 233 | }, 234 | onCacheMiss: (queryKey, executionTime) => { 235 | console.log(`Cache miss for key: ${queryKey} in ${executionTime}ms`); 236 | } 237 | }); 238 | 239 | async function testCache() { 240 | try { 241 | console.log('\n===================='); 242 | console.log('Testing cache.set and cache.get'); 243 | console.log('====================\n'); 244 | 245 | // Call the set function with a queryKey, data, and an array of dependencies 246 | await cache.set('testKey', 'testValue', []); 247 | 248 | // Call the get function to retrieve the cached value 249 | let value = await cache.get('testKey'); 250 | console.log("\ntestKey:", value); // Should print 'testValue' 251 | 252 | console.log('\n===================='); 253 | console.log('Testing cache.invalidate'); 254 | console.log('====================\n'); 255 | 256 | // Call the set function with a queryKey, data, and an array of dependencies 257 | await cache.set('testKey2', 'testValue to be invalidated', ['testDependency']); 258 | 259 | // Call the get function to retrieve the cached value 260 | value = await cache.get('testKey2'); 261 | 
console.log("\ntestKey2 before invalidation:", value); // Should print 'testValue to be invalidated' 262 | 263 | // Invalidate cache for a dependency 264 | await cache.invalidate('testDependency'); 265 | 266 | // Introduce a 50ms delay to clear L1 Cache 267 | await new Promise(resolve => setTimeout(resolve, 50)); 268 | 269 | // Call the get function to retrieve the invalidated value 270 | value = await cache.get('testKey2'); 271 | console.log("\ntestKey2 after invalidation:", value); // Should print null 272 | 273 | console.log('\n===================='); 274 | console.log('Adding additional keys to cache'); 275 | console.log('====================\n'); 276 | 277 | // Introduce a 50ms delay to clear L1 Cache 278 | await new Promise(resolve => setTimeout(resolve, 50)); 279 | 280 | 281 | console.log('\n===================='); 282 | console.log('Testing cache.clear'); 283 | console.log('====================\n'); 284 | 285 | // Clear the entire cache 286 | await cache.clear(); 287 | 288 | // Attempt to get the value after clearing cache 289 | value = await cache.get('testKey'); 290 | console.log("\ntestKey after clear:", value); // Should print null 291 | 292 | console.log('\n===================='); 293 | console.log('Testing cache.getSize'); 294 | console.log('====================\n'); 295 | 296 | // Add two additional keys to the cache 297 | await cache.set('additionalKey1', 'value 1', ['testDependency']); 298 | await cache.set('additionalKey2', 'value 2', []); 299 | 300 | // Check the size of the cache 301 | const size = await cache.getSize(); 302 | console.log("\nCache size:", size); // Should print the size of the cache (number of keys) 303 | 304 | console.log('\n===================='); 305 | console.log('Testing cache.getStringKeySize'); 306 | console.log('====================\n'); 307 | 308 | // Check the size of the cache for string keys 309 | const stringKeySize = await cache.getStringKeySize(); 310 | console.log("\nString key size:", stringKeySize); // Should 
print the number of string keys in the cache 311 | 312 | // Exit the process 313 | process.exit(0); 314 | } catch (error) { 315 | console.error('\n===================='); 316 | console.error('Error:', error); 317 | console.error('====================\n'); 318 | // Exit with error code 319 | process.exit(1); 320 | } 321 | } 322 | 323 | testCache(); 324 | 325 | ``` 326 | 327 | ## Example - Test Factory Functions with Redis and PostgreSQL 328 | 329 | ### Cachemunk Integrated with PostgreSQL 330 | 331 | Copy, paste, and run the code below to see the library in action, integrated with PostgreSQL and Redis. It assumes you have a Redis server and PostgreSQL server running locally with a database called "mydatabase". 332 | 333 | ``` 334 | import pkg from 'pg'; 335 | const { Pool } = pkg; 336 | import { Redis } from 'ioredis'; 337 | import { configureCache } from 'cachemunk'; 338 | 339 | // Configure Redis 340 | const redis = new Redis({ 341 | host: '127.0.0.1', // Replace with your Redis host 342 | port: 6379 // Replace with your Redis port 343 | }); 344 | 345 | // Configure Cachemunk 346 | const cache = configureCache({ 347 | redis, 348 | defaultTtl: 3600, 349 | maxEntrySize: 5000000, 350 | onCacheHit: (queryKey, executionTime) => { 351 | console.log(`Cache hit for key: ${queryKey} in ${executionTime}ms`); 352 | }, 353 | onCacheMiss: (queryKey, executionTime) => { 354 | console.log(`Cache miss for key: ${queryKey} in ${executionTime}ms`); 355 | } 356 | }); 357 | 358 | // Create a new Pool instance 359 | const pool = new Pool({ 360 | user: 'postgres', // Replace with your PostgreSQL user 361 | host: 'localhost', // Replace with your PostgreSQL host 362 | database: 'mydatabase', // Replace with your PostgreSQL database 363 | password: 'mypassword', // Replace with your PostgreSQL password 364 | port: 5432, // Replace with your PostgreSQL port 365 | }); 366 | 367 | // Define a query function 368 | const query = async (text) => { 369 | try { 370 | const result = 
await pool.query(text); 371 | return result; 372 | } catch (err) { 373 | console.error(`Error executing query ${text}:`, err); 374 | throw err; // Re-throw the error after logging it 375 | } 376 | }; 377 | 378 | // Define getData function 379 | const getData = async (queryKey, queryText, dependencies) => { 380 | // Check the cache 381 | await delay(50); // Delay before interacting with cache 382 | const cachedResult = await cache.get(queryKey); 383 | if (cachedResult) { 384 | console.log('Cache hit'); 385 | return JSON.parse(cachedResult); 386 | } 387 | 388 | // Query the database if not cached 389 | const result = await query(queryText); 390 | 391 | // Cache the result 392 | await delay(50); // Delay before interacting with cache 393 | await cache.set(queryKey, JSON.stringify(result.rows), dependencies); 394 | 395 | console.log('Cache miss'); 396 | return result.rows; 397 | }; 398 | 399 | // Define insertData function with cache invalidation 400 | const insertData = async (insertText, dependencies) => { 401 | // Execute the insert query 402 | try { 403 | const result = await query(insertText); 404 | 405 | // Invalidate the cache for the specific dependencies 406 | await delay(50); // Delay before interacting with cache 407 | await cache.invalidate(dependencies); 408 | 409 | console.log('Cache invalidated for dependencies:', dependencies); 410 | return result; 411 | } catch (err) { 412 | console.error(`Error executing insert ${insertText}:`, err); 413 | throw err; // Re-throw the error after logging it 414 | } 415 | }; 416 | 417 | // Define a function to fetch the entire table 418 | const fetchAllData = async () => { 419 | try { 420 | const result = await query('SELECT * FROM mytable'); 421 | return result.rows; 422 | } catch (err) { 423 | console.error('Error fetching all data:', err); 424 | throw err; 425 | } 426 | }; 427 | 428 | // Define a function to setup the database 429 | const setupDatabase = async () => { 430 | try { 431 | // Drop the table if it exists 
432 | await query('DROP TABLE IF EXISTS mytable'); 433 | 434 | // Drop the sequence if it exists to avoid duplicate key issues 435 | await query('DROP SEQUENCE IF EXISTS mytable_id_seq'); 436 | 437 | // Create the table 438 | await query(` 439 | CREATE TABLE mytable ( 440 | id SERIAL PRIMARY KEY, 441 | name VARCHAR(255) NOT NULL 442 | ); 443 | `); 444 | 445 | // Insert initial dummy data 446 | await query("INSERT INTO mytable (name) VALUES ('First Entry')"); 447 | await query("INSERT INTO mytable (name) VALUES ('Second Entry')"); 448 | await query("INSERT INTO mytable (name) VALUES ('Third Entry')"); 449 | } catch (err) { 450 | console.error('Error setting up the database:', err); 451 | throw err; 452 | } 453 | }; 454 | 455 | // Define a function to clear all cache 456 | const clearCache = async () => { 457 | try { 458 | await cache.clear(); 459 | console.log('All cache keys cleared.'); 460 | } catch (err) { 461 | console.error('Error clearing cache:', err); 462 | } 463 | }; 464 | 465 | // Delay function to wait for a specified number of milliseconds 466 | const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); 467 | 468 | const main = async () => { 469 | try { 470 | // Setup the database 471 | await setupDatabase(); 472 | 473 | // Insert data and invalidate the cache 474 | const insertText1 = "INSERT INTO mytable (name) VALUES ('New Entry 1')"; 475 | const dependencies = ['dependency1']; 476 | 477 | // Delay before interacting with cache 478 | await delay(50); 479 | await insertData(insertText1, dependencies); 480 | 481 | // Delay before interacting with cache 482 | await delay(50); 483 | // Get data (should query the database and cache the result) 484 | const queryKey = 'myQueryKey'; 485 | const queryText = 'SELECT * FROM mytable'; 486 | let data = await getData(queryKey, queryText, dependencies); 487 | console.log('Data:', data); 488 | 489 | // Delay before interacting with cache 490 | await delay(50); 491 | // Perform a cache hit 492 | data = 
await getData(queryKey, queryText, dependencies); 493 | console.log('Data (cache hit):', data); 494 | 495 | // Delay before interacting with cache 496 | await delay(50); 497 | // Insert more data and invalidate the cache 498 | const insertText2 = "INSERT INTO mytable (name) VALUES ('New Entry 2')"; 499 | await insertData(insertText2, dependencies); 500 | 501 | // Delay before interacting with cache 502 | await delay(50); 503 | // Get data again (should query the database again due to invalidation) 504 | const dataAfterInsert = await getData(queryKey, queryText, dependencies); 505 | console.log('Data after insert:', dataAfterInsert); 506 | 507 | // Delay before interacting with cache 508 | await delay(50); 509 | // Fetch and show the entire table 510 | const allData = await fetchAllData(); 511 | console.log('All data in mytable:', allData); 512 | 513 | // Clear all cache keys 514 | console.log('Clearing all cache keys...'); 515 | await clearCache(); 516 | 517 | // Verify cache is cleared by attempting to get data 518 | console.log('Verifying cache is cleared...'); 519 | data = await getData(queryKey, queryText, dependencies); 520 | console.log('Data after cache clear:', data); 521 | 522 | } catch (error) { 523 | console.error('Error:', error); 524 | } 525 | }; 526 | 527 | main(); 528 | ``` 529 | ## Issues 530 | 531 | ## Contributors 532 | ![Olivia Carlisle](https://github.com/oliviacarlisle.png?size=100) 533 | 534 | Olivia Carlisle 535 | Github: [@oliviacarlisle](https://github.com/oliviacarlisle) 536 | 537 | ![Jayan Pillai](https://github.com/jrpillai.png?size=100) 538 | 539 | Jayan Pillai 540 | Github: [@jrpillai](https://github.com/jrpillai) 541 | 542 | ![Nick Angelopoulous](https://github.com/nickangel7.png?size=100) 543 | 544 | Nick Angelopoulous 545 | Github: [@nickangel7](https://github.com/nickangel7) 546 | 547 | ![Amy YQ Jiang](https://github.com/yj776.png?size=100) 548 | 549 | Amy YQ Jiang 550 | Github: [@yj776](https://github.com/yj776) 551 | 
-------------------------------------------------------------------------------- /cachemunk/cachemunk-1.0.0.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/CacheMunk/5fb97164d77057c8401aa65ebc9bb7c2c5033fee/cachemunk/cachemunk-1.0.0.tgz -------------------------------------------------------------------------------- /cachemunk/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cachemunk", 3 | "version": "1.0.3", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "cachemunk", 9 | "version": "1.0.3", 10 | "license": "MIT", 11 | "dependencies": { 12 | "ioredis": "^5.4.1", 13 | "snappy": "^7.2.2" 14 | }, 15 | "devDependencies": { 16 | "@types/node": "^20.14.1", 17 | "typescript": "^5.4.5" 18 | } 19 | }, 20 | "node_modules/@ioredis/commands": { 21 | "version": "1.2.0", 22 | "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.2.0.tgz", 23 | "integrity": "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==", 24 | "license": "MIT" 25 | }, 26 | "node_modules/@napi-rs/snappy-android-arm-eabi": { 27 | "version": "7.2.2", 28 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-android-arm-eabi/-/snappy-android-arm-eabi-7.2.2.tgz", 29 | "integrity": "sha512-H7DuVkPCK5BlAr1NfSU8bDEN7gYs+R78pSHhDng83QxRnCLmVIZk33ymmIwurmoA1HrdTxbkbuNl+lMvNqnytw==", 30 | "cpu": [ 31 | "arm" 32 | ], 33 | "license": "MIT", 34 | "optional": true, 35 | "os": [ 36 | "android" 37 | ], 38 | "engines": { 39 | "node": ">= 10" 40 | } 41 | }, 42 | "node_modules/@napi-rs/snappy-android-arm64": { 43 | "version": "7.2.2", 44 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-android-arm64/-/snappy-android-arm64-7.2.2.tgz", 45 | "integrity": "sha512-2R/A3qok+nGtpVK8oUMcrIi5OMDckGYNoBLFyli3zp8w6IArPRfg1yOfVUcHvpUDTo9T7LOS1fXgMOoC796eQw==", 46 | "cpu": [ 
47 | "arm64" 48 | ], 49 | "license": "MIT", 50 | "optional": true, 51 | "os": [ 52 | "android" 53 | ], 54 | "engines": { 55 | "node": ">= 10" 56 | } 57 | }, 58 | "node_modules/@napi-rs/snappy-darwin-arm64": { 59 | "version": "7.2.2", 60 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-darwin-arm64/-/snappy-darwin-arm64-7.2.2.tgz", 61 | "integrity": "sha512-USgArHbfrmdbuq33bD5ssbkPIoT7YCXCRLmZpDS6dMDrx+iM7eD2BecNbOOo7/v1eu6TRmQ0xOzeQ6I/9FIi5g==", 62 | "cpu": [ 63 | "arm64" 64 | ], 65 | "license": "MIT", 66 | "optional": true, 67 | "os": [ 68 | "darwin" 69 | ], 70 | "engines": { 71 | "node": ">= 10" 72 | } 73 | }, 74 | "node_modules/@napi-rs/snappy-darwin-x64": { 75 | "version": "7.2.2", 76 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-darwin-x64/-/snappy-darwin-x64-7.2.2.tgz", 77 | "integrity": "sha512-0APDu8iO5iT0IJKblk2lH0VpWSl9zOZndZKnBYIc+ei1npw2L5QvuErFOTeTdHBtzvUHASB+9bvgaWnQo4PvTQ==", 78 | "cpu": [ 79 | "x64" 80 | ], 81 | "license": "MIT", 82 | "optional": true, 83 | "os": [ 84 | "darwin" 85 | ], 86 | "engines": { 87 | "node": ">= 10" 88 | } 89 | }, 90 | "node_modules/@napi-rs/snappy-freebsd-x64": { 91 | "version": "7.2.2", 92 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-freebsd-x64/-/snappy-freebsd-x64-7.2.2.tgz", 93 | "integrity": "sha512-mRTCJsuzy0o/B0Hnp9CwNB5V6cOJ4wedDTWEthsdKHSsQlO7WU9W1yP7H3Qv3Ccp/ZfMyrmG98Ad7u7lG58WXA==", 94 | "cpu": [ 95 | "x64" 96 | ], 97 | "license": "MIT", 98 | "optional": true, 99 | "os": [ 100 | "freebsd" 101 | ], 102 | "engines": { 103 | "node": ">= 10" 104 | } 105 | }, 106 | "node_modules/@napi-rs/snappy-linux-arm-gnueabihf": { 107 | "version": "7.2.2", 108 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-linux-arm-gnueabihf/-/snappy-linux-arm-gnueabihf-7.2.2.tgz", 109 | "integrity": "sha512-v1uzm8+6uYjasBPcFkv90VLZ+WhLzr/tnfkZ/iD9mHYiULqkqpRuC8zvc3FZaJy5wLQE9zTDkTJN1IvUcZ+Vcg==", 110 | "cpu": [ 111 | "arm" 112 | ], 113 | "license": "MIT", 114 | "optional": true, 115 | "os": [ 116 | 
"linux" 117 | ], 118 | "engines": { 119 | "node": ">= 10" 120 | } 121 | }, 122 | "node_modules/@napi-rs/snappy-linux-arm64-gnu": { 123 | "version": "7.2.2", 124 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-linux-arm64-gnu/-/snappy-linux-arm64-gnu-7.2.2.tgz", 125 | "integrity": "sha512-LrEMa5pBScs4GXWOn6ZYXfQ72IzoolZw5txqUHVGs8eK4g1HR9HTHhb2oY5ySNaKakG5sOgMsb1rwaEnjhChmQ==", 126 | "cpu": [ 127 | "arm64" 128 | ], 129 | "license": "MIT", 130 | "optional": true, 131 | "os": [ 132 | "linux" 133 | ], 134 | "engines": { 135 | "node": ">= 10" 136 | } 137 | }, 138 | "node_modules/@napi-rs/snappy-linux-arm64-musl": { 139 | "version": "7.2.2", 140 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-linux-arm64-musl/-/snappy-linux-arm64-musl-7.2.2.tgz", 141 | "integrity": "sha512-3orWZo9hUpGQcB+3aTLW7UFDqNCQfbr0+MvV67x8nMNYj5eAeUtMmUE/HxLznHO4eZ1qSqiTwLbVx05/Socdlw==", 142 | "cpu": [ 143 | "arm64" 144 | ], 145 | "license": "MIT", 146 | "optional": true, 147 | "os": [ 148 | "linux" 149 | ], 150 | "engines": { 151 | "node": ">= 10" 152 | } 153 | }, 154 | "node_modules/@napi-rs/snappy-linux-x64-gnu": { 155 | "version": "7.2.2", 156 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-linux-x64-gnu/-/snappy-linux-x64-gnu-7.2.2.tgz", 157 | "integrity": "sha512-jZt8Jit/HHDcavt80zxEkDpH+R1Ic0ssiVCoueASzMXa7vwPJeF4ZxZyqUw4qeSy7n8UUExomu8G8ZbP6VKhgw==", 158 | "cpu": [ 159 | "x64" 160 | ], 161 | "license": "MIT", 162 | "optional": true, 163 | "os": [ 164 | "linux" 165 | ], 166 | "engines": { 167 | "node": ">= 10" 168 | } 169 | }, 170 | "node_modules/@napi-rs/snappy-linux-x64-musl": { 171 | "version": "7.2.2", 172 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-linux-x64-musl/-/snappy-linux-x64-musl-7.2.2.tgz", 173 | "integrity": "sha512-Dh96IXgcZrV39a+Tej/owcd9vr5ihiZ3KRix11rr1v0MWtVb61+H1GXXlz6+Zcx9y8jM1NmOuiIuJwkV4vZ4WA==", 174 | "cpu": [ 175 | "x64" 176 | ], 177 | "license": "MIT", 178 | "optional": true, 179 | "os": [ 180 | "linux" 181 | ], 182 | 
"engines": { 183 | "node": ">= 10" 184 | } 185 | }, 186 | "node_modules/@napi-rs/snappy-win32-arm64-msvc": { 187 | "version": "7.2.2", 188 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-win32-arm64-msvc/-/snappy-win32-arm64-msvc-7.2.2.tgz", 189 | "integrity": "sha512-9No0b3xGbHSWv2wtLEn3MO76Yopn1U2TdemZpCaEgOGccz1V+a/1d16Piz3ofSmnA13HGFz3h9NwZH9EOaIgYA==", 190 | "cpu": [ 191 | "arm64" 192 | ], 193 | "license": "MIT", 194 | "optional": true, 195 | "os": [ 196 | "win32" 197 | ], 198 | "engines": { 199 | "node": ">= 10" 200 | } 201 | }, 202 | "node_modules/@napi-rs/snappy-win32-ia32-msvc": { 203 | "version": "7.2.2", 204 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-win32-ia32-msvc/-/snappy-win32-ia32-msvc-7.2.2.tgz", 205 | "integrity": "sha512-QiGe+0G86J74Qz1JcHtBwM3OYdTni1hX1PFyLRo3HhQUSpmi13Bzc1En7APn+6Pvo7gkrcy81dObGLDSxFAkQQ==", 206 | "cpu": [ 207 | "ia32" 208 | ], 209 | "license": "MIT", 210 | "optional": true, 211 | "os": [ 212 | "win32" 213 | ], 214 | "engines": { 215 | "node": ">= 10" 216 | } 217 | }, 218 | "node_modules/@napi-rs/snappy-win32-x64-msvc": { 219 | "version": "7.2.2", 220 | "resolved": "https://registry.npmjs.org/@napi-rs/snappy-win32-x64-msvc/-/snappy-win32-x64-msvc-7.2.2.tgz", 221 | "integrity": "sha512-a43cyx1nK0daw6BZxVcvDEXxKMFLSBSDTAhsFD0VqSKcC7MGUBMaqyoWUcMiI7LBSz4bxUmxDWKfCYzpEmeb3w==", 222 | "cpu": [ 223 | "x64" 224 | ], 225 | "license": "MIT", 226 | "optional": true, 227 | "os": [ 228 | "win32" 229 | ], 230 | "engines": { 231 | "node": ">= 10" 232 | } 233 | }, 234 | "node_modules/@types/node": { 235 | "version": "20.14.1", 236 | "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.1.tgz", 237 | "integrity": "sha512-T2MzSGEu+ysB/FkWfqmhV3PLyQlowdptmmgD20C6QxsS8Fmv5SjpZ1ayXaEC0S21/h5UJ9iA6W/5vSNU5l00OA==", 238 | "dev": true, 239 | "license": "MIT", 240 | "dependencies": { 241 | "undici-types": "~5.26.4" 242 | } 243 | }, 244 | "node_modules/cluster-key-slot": { 245 | "version": "1.1.2", 246 | "resolved": 
"https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", 247 | "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", 248 | "license": "Apache-2.0", 249 | "engines": { 250 | "node": ">=0.10.0" 251 | } 252 | }, 253 | "node_modules/debug": { 254 | "version": "4.3.5", 255 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", 256 | "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", 257 | "license": "MIT", 258 | "dependencies": { 259 | "ms": "2.1.2" 260 | }, 261 | "engines": { 262 | "node": ">=6.0" 263 | }, 264 | "peerDependenciesMeta": { 265 | "supports-color": { 266 | "optional": true 267 | } 268 | } 269 | }, 270 | "node_modules/denque": { 271 | "version": "2.1.0", 272 | "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", 273 | "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==", 274 | "license": "Apache-2.0", 275 | "engines": { 276 | "node": ">=0.10" 277 | } 278 | }, 279 | "node_modules/ioredis": { 280 | "version": "5.4.1", 281 | "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.4.1.tgz", 282 | "integrity": "sha512-2YZsvl7jopIa1gaePkeMtd9rAcSjOOjPtpcLlOeusyO+XH2SK5ZcT+UCrElPP+WVIInh2TzeI4XW9ENaSLVVHA==", 283 | "license": "MIT", 284 | "dependencies": { 285 | "@ioredis/commands": "^1.1.1", 286 | "cluster-key-slot": "^1.1.0", 287 | "debug": "^4.3.4", 288 | "denque": "^2.1.0", 289 | "lodash.defaults": "^4.2.0", 290 | "lodash.isarguments": "^3.1.0", 291 | "redis-errors": "^1.2.0", 292 | "redis-parser": "^3.0.0", 293 | "standard-as-callback": "^2.1.0" 294 | }, 295 | "engines": { 296 | "node": ">=12.22.0" 297 | }, 298 | "funding": { 299 | "type": "opencollective", 300 | "url": "https://opencollective.com/ioredis" 301 | } 302 | }, 303 | "node_modules/lodash.defaults": { 304 | "version": "4.2.0", 305 | "resolved": 
"https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", 306 | "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==", 307 | "license": "MIT" 308 | }, 309 | "node_modules/lodash.isarguments": { 310 | "version": "3.1.0", 311 | "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", 312 | "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==", 313 | "license": "MIT" 314 | }, 315 | "node_modules/ms": { 316 | "version": "2.1.2", 317 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", 318 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", 319 | "license": "MIT" 320 | }, 321 | "node_modules/redis-errors": { 322 | "version": "1.2.0", 323 | "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", 324 | "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==", 325 | "license": "MIT", 326 | "engines": { 327 | "node": ">=4" 328 | } 329 | }, 330 | "node_modules/redis-parser": { 331 | "version": "3.0.0", 332 | "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", 333 | "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", 334 | "license": "MIT", 335 | "dependencies": { 336 | "redis-errors": "^1.0.0" 337 | }, 338 | "engines": { 339 | "node": ">=4" 340 | } 341 | }, 342 | "node_modules/snappy": { 343 | "version": "7.2.2", 344 | "resolved": "https://registry.npmjs.org/snappy/-/snappy-7.2.2.tgz", 345 | "integrity": "sha512-iADMq1kY0v3vJmGTuKcFWSXt15qYUz7wFkArOrsSg0IFfI3nJqIJvK2/ZbEIndg7erIJLtAVX2nSOqPz7DcwbA==", 346 | "license": "MIT", 347 | "engines": { 348 | "node": ">= 10" 349 | }, 350 | "funding": { 351 | "type": "github", 352 | "url": "https://github.com/sponsors/Brooooooklyn" 353 | }, 
354 | "optionalDependencies": { 355 | "@napi-rs/snappy-android-arm-eabi": "7.2.2", 356 | "@napi-rs/snappy-android-arm64": "7.2.2", 357 | "@napi-rs/snappy-darwin-arm64": "7.2.2", 358 | "@napi-rs/snappy-darwin-x64": "7.2.2", 359 | "@napi-rs/snappy-freebsd-x64": "7.2.2", 360 | "@napi-rs/snappy-linux-arm-gnueabihf": "7.2.2", 361 | "@napi-rs/snappy-linux-arm64-gnu": "7.2.2", 362 | "@napi-rs/snappy-linux-arm64-musl": "7.2.2", 363 | "@napi-rs/snappy-linux-x64-gnu": "7.2.2", 364 | "@napi-rs/snappy-linux-x64-musl": "7.2.2", 365 | "@napi-rs/snappy-win32-arm64-msvc": "7.2.2", 366 | "@napi-rs/snappy-win32-ia32-msvc": "7.2.2", 367 | "@napi-rs/snappy-win32-x64-msvc": "7.2.2" 368 | } 369 | }, 370 | "node_modules/standard-as-callback": { 371 | "version": "2.1.0", 372 | "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", 373 | "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==", 374 | "license": "MIT" 375 | }, 376 | "node_modules/typescript": { 377 | "version": "5.4.5", 378 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", 379 | "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", 380 | "dev": true, 381 | "license": "Apache-2.0", 382 | "bin": { 383 | "tsc": "bin/tsc", 384 | "tsserver": "bin/tsserver" 385 | }, 386 | "engines": { 387 | "node": ">=14.17" 388 | } 389 | }, 390 | "node_modules/undici-types": { 391 | "version": "5.26.5", 392 | "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", 393 | "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", 394 | "dev": true, 395 | "license": "MIT" 396 | } 397 | } 398 | } 399 | -------------------------------------------------------------------------------- /cachemunk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"name": "cachemunk", 3 | "version": "1.0.3", 4 | "description": "Performance-Optimized Caching Middleware for Node.js: Efficiently abstracts Redis to cache RDBMS query results, enhancing Node.js applications with sub-millisecond latency", 5 | "main": "dist/cache.js", 6 | "types": "dist/cache.d.ts", 7 | "scripts": { 8 | "test": "echo \"Error: no test specified\" && exit 1", 9 | "build": "tsc", 10 | "prepare": "npm run build" 11 | }, 12 | "repository": { 13 | "type": "git", 14 | "url": "git+https://github.com/oslabs-beta/CacheMunk.git" 15 | }, 16 | "keywords": [ 17 | "Redis", 18 | "cache", 19 | "PostgreSQL", 20 | "Node.js" 21 | ], 22 | "author": "", 23 | "license": "MIT", 24 | "bugs": { 25 | "url": "https://github.com/oslabs-beta/CacheMunk/issues" 26 | }, 27 | "homepage": "https://github.com/oslabs-beta/CacheMunk#readme", 28 | "dependencies": { 29 | "ioredis": "^5.4.1", 30 | "snappy": "^7.2.2" 31 | }, 32 | "devDependencies": { 33 | "@types/node": "^20.14.1", 34 | "typescript": "^5.4.5" 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /cachemunk/src/cache.ts: -------------------------------------------------------------------------------- 1 | import { Redis } from 'ioredis'; 2 | import { compress, uncompress } from 'snappy'; 3 | 4 | type EventHandler = (queryKey: string, executionTime: number) => void; 5 | 6 | interface Config { 7 | redis: Redis; 8 | defaultTtl?: number; 9 | maxEntrySize?: number; 10 | onCacheHit?: EventHandler; 11 | onCacheMiss?: EventHandler; 12 | } 13 | // write cache in the functional style (creator function) 14 | // instead of class (OOP) syntax for stronger encapsulation 15 | export const configureCache = (options: Config) => { 16 | const { redis } = options; 17 | 18 | const cacheL1 = new Map(); 19 | 20 | // checks that redis passed in is an instance of Redis 21 | if (!(redis instanceof Redis)) { 22 | throw new Error('ioredis client not found'); 23 | } 24 | 25 | // set default ttl to 1 hour 
(3600 seconds) 26 | const defaultTtl = options.defaultTtl && options.defaultTtl > 0 ? options.defaultTtl : 3600; 27 | 28 | // set default maxEntrySize to 5MB (5_000_000 bytes) 29 | const maxEntrySize = 30 | options.maxEntrySize && options.maxEntrySize > 0 ? options.maxEntrySize : 5_000_000; 31 | 32 | const { onCacheHit, onCacheMiss } = options; 33 | 34 | const calcExecTime = (start: bigint, end: bigint) => { 35 | const diff = end - start; 36 | return Number(diff) / 1_000_000; // convert nanoseconds to milliseconds 37 | }; 38 | 39 | // Function to add a query result to the cache 40 | async function set( 41 | queryKey: string, 42 | data: string | Buffer, 43 | dependencies: string[], 44 | ttlInSeconds = defaultTtl, // default to 1 hour in seconds 45 | ): Promise { 46 | // Capture initial timestamp for performance monitoring 47 | // const start = process.hrtime.bigint(); 48 | 49 | // Convert data to binary Buffer if it is a string 50 | const binaryData = typeof data === 'string' ? Buffer.from(data) : data; 51 | 52 | // check if binary Data exceeds maxEntrySize 53 | if (binaryData.length > maxEntrySize) { 54 | throw new Error('maxEntrySize exceeded'); 55 | } 56 | 57 | // Compress buffer to save bandwidth using snappy. To further compress buffer. ex: 10kb ->3 kb 58 | const compressedData = await compress(binaryData); 59 | 60 | if (dependencies.length > 0) { 61 | // Create a pipeline/transaction (ensure data integrity and consistency. 
If one fail, all fails) 62 | const pipeline = redis.multi(); 63 | 64 | // Store the query result 65 | pipeline.set(queryKey, compressedData, 'EX', ttlInSeconds); 66 | 67 | // Track dependencies 68 | dependencies.forEach((dependency) => { 69 | const dependencyKey = `dependency:${dependency}`; 70 | pipeline.sadd(dependencyKey, queryKey); 71 | pipeline.expire(dependencyKey, ttlInSeconds); // Set the TTL for the dependency key 72 | }); 73 | 74 | // Execute the pipeline 75 | await pipeline.exec(); 76 | } else { 77 | await redis.set(queryKey, compressedData, 'EX', ttlInSeconds); 78 | } 79 | 80 | // Capture final timestamp 81 | // const end = process.hrtime.bigint(); 82 | 83 | // console.log(`write data to cache in ${calcExecTime(start, end).toFixed(3)}`); 84 | } 85 | 86 | // Function to retrieve a cached query result 87 | async function get(queryKey: string): Promise { 88 | // Capture initial timestamp for performance monitoring 89 | const start = process.hrtime.bigint(); 90 | 91 | const fromL1Cache = cacheL1.get(queryKey); 92 | if (fromL1Cache) { 93 | return fromL1Cache; 94 | } 95 | 96 | // Retrieve the cached query result based on query key 97 | // const startReq = process.hrtime.bigint(); 98 | const compressedData = await redis.getBuffer(queryKey); 99 | // const endReq = process.hrtime.bigint(); 100 | 101 | // Handle cache miss 102 | if (!compressedData) { 103 | // this is a cache miss 104 | // to do: log cache miss 105 | const end = process.hrtime.bigint(); 106 | if (onCacheMiss) onCacheMiss(queryKey, calcExecTime(start, end)); 107 | // console.log(`cache miss in ${calcExecTime(start, end).toFixed(3)}`); 108 | return null; 109 | } 110 | 111 | // Decompress result 112 | // const startSnappy = process.hrtime.bigint(); 113 | const binaryData = await uncompress(compressedData); 114 | // const endSnappy = process.hrtime.bigint(); 115 | 116 | // Convert result to string 117 | const data = binaryData.toString(); 118 | 119 | if (!cacheL1.has(queryKey)) { 120 | 
cacheL1.set(queryKey, data); 121 | setTimeout(() => cacheL1.delete(queryKey), 50); 122 | } 123 | 124 | // Capture final timestamp 125 | const end = process.hrtime.bigint(); 126 | 127 | if (onCacheHit) onCacheHit(queryKey, calcExecTime(start, end)); 128 | // console.log(`response from redis in ${calcExecTime(startReq, endReq).toFixed(3)}`); 129 | // console.log(`compressed data size ${compressedData.length / 1000} KB`); 130 | // console.log(`decompression in ${calcExecTime(startSnappy, endSnappy).toFixed(3)}`); 131 | // console.log(`cache hit in ${calcExecTime(start, end).toFixed(3)}`); 132 | return data; 133 | } 134 | 135 | // Function to invalidate cache based on table updates 136 | async function invalidate(dependency: string) { 137 | // const start = process.hrtime.bigint(); 138 | 139 | const dependencyKey = `dependency:${dependency}`; 140 | 141 | const queriesToInvalidate = await redis.smembers(dependencyKey); 142 | 143 | if (queriesToInvalidate.length > 0) { 144 | // Create a pipeline to batch multiple operations 145 | const pipeline = redis.multi(); 146 | 147 | queriesToInvalidate.forEach((queryKey) => pipeline.del(queryKey)); 148 | pipeline.del(dependencyKey); 149 | 150 | await pipeline.exec(); 151 | } else { 152 | // Clear the dependency set if it's the only key 153 | await redis.del(dependencyKey); 154 | } 155 | 156 | // const end = process.hrtime.bigint(); 157 | 158 | // console.log(`cache invalidate in ${calcExecTime(start, end).toFixed(3)}`); 159 | } 160 | 161 | // Function to clear the cache 162 | async function clear(): Promise { 163 | try { 164 | const result = await redis.flushall(); 165 | console.log('Cache for the current database cleared', result); 166 | } catch (err) { 167 | console.error('Error clearing cache:', err); 168 | } 169 | } 170 | 171 | async function getSize(): Promise { 172 | try { 173 | const size = await redis.dbsize(); 174 | return size; 175 | } catch (err) { 176 | console.error('Error getting cache size', err); 177 | return 0; 
178 | } 179 | } 180 | 181 | async function getStringKeySize(): Promise { 182 | let cursor = '0'; 183 | let stringKeyCount = 0; 184 | 185 | try { 186 | do { 187 | const [newCursor, keys] = await redis.scan(cursor, 'COUNT', 100); 188 | 189 | cursor = newCursor; 190 | for (const key of keys) { 191 | const type = await redis.type(key); 192 | if (type === 'string') { 193 | stringKeyCount++; 194 | } 195 | } 196 | } while (cursor !== '0'); 197 | 198 | return stringKeyCount; 199 | } catch (err) { 200 | console.error('Error getting string key size', err); 201 | return 0; 202 | } 203 | } 204 | 205 | return { set, get, invalidate, clear, getSize, getStringKeySize }; 206 | }; 207 | 208 | export default configureCache; 209 | -------------------------------------------------------------------------------- /cachemunk/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "include": ["src"], 3 | "compilerOptions": { 4 | "strict": true, 5 | "target": "es2022", 6 | "esModuleInterop": true, 7 | "module": "NodeNext", 8 | "rootDir":"./src", 9 | "declaration": true, 10 | "outDir": "./dist" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /client/public/images/circle_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/CacheMunk/5fb97164d77057c8401aa65ebc9bb7c2c5033fee/client/public/images/circle_logo.png -------------------------------------------------------------------------------- /client/public/images/happy_cachemunk_png_transparent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/CacheMunk/5fb97164d77057c8401aa65ebc9bb7c2c5033fee/client/public/images/happy_cachemunk_png_transparent.png -------------------------------------------------------------------------------- /client/public/images/image-1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/CacheMunk/5fb97164d77057c8401aa65ebc9bb7c2c5033fee/client/public/images/image-1.png -------------------------------------------------------------------------------- /client/public/images/sad_cachemunk_png_transparent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/CacheMunk/5fb97164d77057c8401aa65ebc9bb7c2c5033fee/client/public/images/sad_cachemunk_png_transparent.png -------------------------------------------------------------------------------- /client/src/App.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { BrowserRouter, Routes, Route, Link } from 'react-router-dom'; 3 | import { ThemeProvider, CssBaseline, Box, Tab, Tabs } from '@mui/material'; 4 | import theme from './theme'; 5 | import Header from './components/Header'; 6 | import CacheSwitch from './components/CacheSwitch'; 7 | import QueryBox from './components/QueryBox'; 8 | import SubmitButton from './components/SubmitButton'; 9 | import ResponseTimeChart from './components/ResponseTimeChart'; 10 | import CacheMetricsChart from './components/CacheMetricsChart'; 11 | import QueryResultBox from './components/QueryResultBox'; 12 | import SummaryGauges from './components/SummaryGauges'; 13 | import CustomSelectQuery from './components/CustomSelectQuery'; 14 | import CustomInsertQuery from './components/CustomInsertQuery'; 15 | import FrequencyDistribution from './components/FrequencyDistribution'; 16 | import ClearCacheButton from './components/ClearCacheButton'; 17 | import SummaryContainer from './components/SummaryContainer'; 18 | 19 | const App: React.FC = () => { 20 | const [cacheSwitch, setCacheSwitch] = useState(true); 21 | const [querySelect, setQuerySelect] = useState(''); 22 | const [cacheHits, 
setCacheHits] = useState(0); 23 | const [cacheMisses, setCacheMisses] = useState(0); 24 | const [responseTimes, setResponseTimes] = useState([]); 25 | const [queryResult, setQueryResult] = useState({}); 26 | const [tabValue, setTabValue] = useState(0); 27 | const [cacheSize, setCacheSize] = useState(0); 28 | const [cacheStatus, setCacheStatus] = useState(null); 29 | 30 | const handleTabChange = (event: React.SyntheticEvent, newValue: number) => { 31 | setTabValue(newValue); 32 | }; 33 | 34 | return ( 35 | 36 | 37 | 38 |
39 | 40 | 41 | 42 | 43 | 44 | 46 | 47 | 48 | 56 | 57 | 58 | 59 | 60 | 61 | 77 | 78 | 79 | 80 | 81 | 82 | 88 | 89 | 90 | 91 | 92 | 105 | 113 | 114 | } /> 115 | } /> 116 | 117 | 118 | 119 | ); 120 | }; 121 | 122 | export default App; 123 | 124 | -------------------------------------------------------------------------------- /client/src/components/CacheMetricsChart.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Doughnut } from 'react-chartjs-2'; 3 | import { Chart as ChartJS, ArcElement, Tooltip, Legend } from 'chart.js/auto'; 4 | import { Box, Typography, Card, CardContent, useTheme } from '@mui/material'; 5 | import StatusCard from './CacheStatus'; 6 | 7 | ChartJS.register(ArcElement, Tooltip, Legend); 8 | 9 | interface CacheMetricsChartProps { 10 | cacheHits: number; 11 | cacheMisses: number; 12 | cacheSize: number; 13 | cacheStatus: string; 14 | } 15 | 16 | const MetricCard: React.FC<{ title: string; value: number }> = ({ title, value }) => { 17 | const theme = useTheme(); 18 | 19 | const cardStyle = { 20 | width: '300px', 21 | height: '200px', 22 | backgroundColor: theme.palette.background.paper, 23 | color: theme.palette.text.primary, 24 | }; 25 | 26 | return ( 27 | 28 | 29 | 39 | {title} 40 | 41 | 48 | 49 | {value} 50 | 51 | 52 | 53 | 54 | ); 55 | }; 56 | 57 | const CacheMetricsChart: React.FC = ({ cacheHits, cacheMisses, cacheSize, cacheStatus }) => { 58 | const data = { 59 | labels: ['Cache Hits', 'Cache Misses'], 60 | datasets: [ 61 | { 62 | label: 'Cache Metrics', 63 | data: [cacheHits, cacheMisses], 64 | backgroundColor: ['#36A2EB', '#FF6384'], 65 | hoverBackgroundColor: ['#36A2EB', '#FF6384'], 66 | }, 67 | ], 68 | }; 69 | 70 | const theme = useTheme(); 71 | 72 | return ( 73 | 86 | 87 | Cache Metrics 88 | 89 | 96 | 103 | 104 | 105 | 113 | 114 | 115 | 123 | 124 | 125 | 126 | 127 | ); 128 | }; 129 | 130 | export default CacheMetricsChart; 131 | 
-------------------------------------------------------------------------------- /client/src/components/CacheStatus.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Card, CardContent, Typography, Box } from '@mui/material'; 3 | import { useTheme } from '@mui/material/styles'; 4 | 5 | interface StatusCardProps { 6 | cacheStatus: string; 7 | } 8 | 9 | const StatusCard: React.FC = ({ cacheStatus }) => { 10 | const theme = useTheme(); 11 | 12 | console.log("CacheStatus in CacheStatus component: ", cacheStatus) 13 | 14 | const getStatusColor = () => { 15 | switch (cacheStatus) { 16 | case 'CACHE_HIT': 17 | return '#36A2EB'; 18 | case 'CACHE_MISS': 19 | return '#FF6384'; 20 | default: 21 | return 'transparent'; 22 | } 23 | }; 24 | 25 | const imageSrc = cacheStatus === 'CACHE_HIT' 26 | ? '/images/happy_cachemunk_png_transparent.png' 27 | : cacheStatus === 'CACHE_MISS' 28 | ? '/images/sad_cachemunk_png_transparent.png' 29 | : null; 30 | 31 | const cacheMessage = cacheStatus === 'CACHE_HIT' 32 | ? 'Cache Hit!' 33 | : cacheStatus === 'CACHE_MISS' 34 | ? 'Cache Miss!' 
35 | : null; 36 | 37 | const cardStyle = { 38 | width: '300px', 39 | height: '200px', 40 | backgroundColor: theme.palette.background.paper, 41 | color: getStatusColor(), 42 | }; 43 | 44 | return ( 45 | cacheStatus && ( 46 | 47 | 48 | 49 | {cacheMessage} 50 | 51 | 52 | {cacheMessage} 53 | 54 | 55 | 56 | ) 57 | ); 58 | 59 | }; 60 | 61 | export default StatusCard; 62 | -------------------------------------------------------------------------------- /client/src/components/CacheSwitch.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react'; 2 | import FormGroup from '@mui/material/FormGroup'; 3 | import FormControlLabel from '@mui/material/FormControlLabel'; 4 | import Switch from '@mui/material/Switch'; 5 | 6 | interface CacheSwitchProps { 7 | cacheSwitch: boolean; 8 | setCacheSwitch: React.Dispatch>; 9 | } 10 | const CacheSwitch: React.FC = ({ cacheSwitch, setCacheSwitch }) => { 11 | const handleToggle = (event: React.ChangeEvent) => { 12 | setCacheSwitch(event.target.checked); 13 | }; 14 | 15 | return ( 16 | 17 | } 19 | label='Cache' 20 | /> 21 | 22 | ); 23 | }; 24 | 25 | export default CacheSwitch; 26 | -------------------------------------------------------------------------------- /client/src/components/ClearCacheButton.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Button } from '@mui/material'; 3 | 4 | interface ClearCacheButtonProps { 5 | setCacheHits: React.Dispatch>; 6 | setCacheMisses: React.Dispatch>; 7 | setResponseTimes: React.Dispatch>; 8 | setQueryResult: React.Dispatch>; 9 | setCacheSize?: React.Dispatch>; 10 | setCacheStatus?: React.Dispatch>; 11 | label?: string; 12 | onClick?: () => void; 13 | disabled?: boolean; 14 | } 15 | 16 | const ClearCacheButton: React.FC = ({ 17 | setCacheHits, 18 | setCacheMisses, 19 | setResponseTimes, 20 | setQueryResult, 21 | setCacheSize, 22 | setCacheStatus, 23 | label = 
'Clear Cache', 24 | onClick, 25 | disabled = false, 26 | }) => { 27 | const handleClick = async () => { 28 | try { 29 | await fetch('/deleteCache', { 30 | method: 'GET', 31 | headers: { 32 | 'Content-Type': 'application/json', 33 | }, 34 | }); 35 | 36 | setCacheHits(0); 37 | setCacheMisses(0); 38 | setResponseTimes([]); 39 | setQueryResult({}); 40 | setCacheSize(0); 41 | setCacheStatus(null); 42 | 43 | if (onClick) { 44 | onClick(); 45 | } 46 | } catch (error) { 47 | console.error('Error clearing cache:', error); 48 | } 49 | }; 50 | 51 | return ( 52 | 55 | ); 56 | }; 57 | 58 | export default ClearCacheButton; 59 | -------------------------------------------------------------------------------- /client/src/components/CustomInsertQuery.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { Box, Button, TextField, Typography, MenuItem, Select, FormControl, InputLabel } from '@mui/material'; 3 | 4 | interface NewEntryFromProps { 5 | cacheHits: number; 6 | setCacheHits: React.Dispatch>; 7 | cacheMisses: number; 8 | setCacheMisses: React.Dispatch>; 9 | responseTimes: number[]; 10 | setResponseTimes: React.Dispatch>; 11 | } 12 | 13 | const stateCodes = { 14 | "AK": 1400, 15 | "AL": 1456, 16 | "AR": 1444, 17 | "AZ": 1434, 18 | "CA": 1416, 19 | "CO": 1450, 20 | "CT": 1435, 21 | "DC": 1437, 22 | "DE": 1399, 23 | "FL": 1436, 24 | "GA": 1455, 25 | "HI": 1411, 26 | "IA": 1459, 27 | "ID": 1460, 28 | "IL": 1425, 29 | "IN": 1440, 30 | "KS": 1406, 31 | "KY": 1419, 32 | "LA": 1457, 33 | "MA": 1433, 34 | "MD": 1401, 35 | "ME": 1453, 36 | "MI": 1426, 37 | "MN": 1420, 38 | "MO": 1451, 39 | "MS": 1430, 40 | "MT": 1446, 41 | "NC": 1447, 42 | "ND": 1418, 43 | "NE": 1408, 44 | "NH": 1404, 45 | "NJ": 1417, 46 | "NM": 1423, 47 | "NV": 1458, 48 | "NY": 1452, 49 | "OH": 4851, 50 | "OK": 1421, 51 | "OR": 1415, 52 | "PA": 1422, 53 | "PR": 1449, 54 | "RI": 1461, 55 | "SC": 1443, 56 | "SD": 1445, 57 | "TN": 
1454, 58 | "TX": 1407, 59 | "UT": 1414, 60 | "VA": 1427, 61 | "VT": 1409, 62 | "WA": 1462, 63 | "WI": 1441, 64 | "WV": 1429, 65 | "WY": 1442 66 | }; 67 | 68 | const getRandomCoordinates = () => { 69 | const latitude = (Math.random() * (49.384358 - 24.396308) + 24.396308).toFixed(6); 70 | const longitude = (Math.random() * (-66.93457 - -125.00165) + -125.00165).toFixed(6); 71 | return { latitude, longitude }; 72 | }; 73 | 74 | const NewEntryForm: React.FC = ({ 75 | cacheHits, 76 | setCacheHits, 77 | cacheMisses, 78 | setCacheMisses, 79 | responseTimes, 80 | setResponseTimes 81 | }) => { 82 | const [formData, setFormData] = useState({ 83 | name: '', 84 | state_code: '', 85 | country_id: 233, 86 | country_code: 'US', 87 | latitude: '', 88 | longitude: '', 89 | flag: true, 90 | }); 91 | 92 | const [showData, setShowData] = useState(false); 93 | const [error, setError] = useState(''); 94 | const [success, setSuccess] = useState(false); 95 | 96 | const handleChange = (event) => { 97 | const { name, value } = event.target; 98 | const newFormData = { 99 | ...formData, 100 | [name]: value, 101 | state_id: name === 'state_code' ? 
stateCodes[value] : formData.state_id, 102 | }; 103 | if (name === 'state_code') { 104 | const { latitude, longitude } = getRandomCoordinates(); 105 | newFormData.latitude = latitude; 106 | newFormData.longitude = longitude; 107 | } 108 | setFormData(newFormData); 109 | }; 110 | 111 | const handleShowData = () => { 112 | setShowData((prevShowData) => !prevShowData); 113 | }; 114 | 115 | const handleSubmit = async (event) => { 116 | event.preventDefault(); 117 | setError(''); 118 | setSuccess(false); 119 | 120 | const submissionData = { ...formData }; 121 | 122 | try { 123 | const response = await fetch('/data/dynamic-insert', { 124 | method: 'POST', 125 | headers: { 126 | 'Content-Type': 'application/json', 127 | }, 128 | body: JSON.stringify(submissionData), 129 | }); 130 | 131 | if (!response.ok) { 132 | throw new Error('Network response was not ok'); 133 | } 134 | 135 | setSuccess(true); 136 | } catch (err) { 137 | setError('Error submitting form'); 138 | } 139 | }; 140 | 141 | return ( 142 | 143 | 144 | Add a New City in the United States 145 | 146 | 147 | 158 | 159 | State Code 160 | 172 | 173 | 174 | 177 | 180 | 181 | 182 | {showData && ( 183 | 184 | 185 | {`INSERT INTO public.cities ( 186 | country_code, 187 | country_id, 188 | flag, 189 | latitude, 190 | longitude, 191 | name, 192 | state_code, 193 | state_id 194 | ) 195 | VALUES ( 196 | '${formData.country_code}', 197 | ${formData.country_id}, 198 | ${formData.flag}, 199 | '${formData.latitude}', 200 | '${formData.longitude}', 201 | '${formData.name}', 202 | '${formData.state_code}', 203 | ${formData.state_id} 204 | );`} 205 | 206 | 207 | )} 208 | {error && {error}} 209 | {success && Form submitted successfully!} 210 | 211 | ); 212 | }; 213 | 214 | export default NewEntryForm; 215 | -------------------------------------------------------------------------------- /client/src/components/CustomSelectQuery.tsx: -------------------------------------------------------------------------------- 1 | import React, { 
useState } from 'react'; 2 | import { Box, Button, TextField, Typography } from '@mui/material'; 3 | 4 | //write interface for CustomSelectQueryProps 5 | interface CustomSelectQueryProps { 6 | cacheSwitch: boolean; 7 | setCacheHits: React.Dispatch>; 8 | cacheMisses: number; 9 | setCacheMisses: React.Dispatch>; 10 | responseTimes: number[]; 11 | setResponseTimes: React.Dispatch>; 12 | cacheSize: number; 13 | setCacheSize: React.Dispatch>; 14 | cacheStatus: string; 15 | setCacheStatus: React.Dispatch>; 16 | } 17 | 18 | // CustomSelectQuery component 19 | const CustomSelectQuery: React.FC = ({ 20 | cacheSwitch, 21 | setCacheHits, 22 | cacheMisses, 23 | setCacheMisses, 24 | responseTimes, 25 | setResponseTimes, 26 | cacheSize, 27 | setCacheSize, 28 | cacheStatus, 29 | setCacheStatus, 30 | }) => { 31 | const [query, setQuery] = useState(''); 32 | const [result, setResult] = useState(null); 33 | const [error, setError] = useState(''); 34 | 35 | // fetchChartData function 36 | const fetchChartData = async () => { 37 | try { 38 | const cacheHitMissReponse = await fetch('/cache-analytics'); // fetches from endpoint 39 | const cacheHitMissData = await cacheHitMissReponse.json(); // converts to Javascript object 40 | setCacheHits(cacheHitMissData.cacheHits); // uses key to retrieve value and set state 41 | setCacheMisses(cacheHitMissData.cacheMisses); 42 | console.log("CacheHitMissData.status: ", cacheHitMissData.status) 43 | setCacheStatus(cacheHitMissData.status); 44 | 45 | const responseTimesResponse = await fetch('/cache-response-times'); 46 | const responseTimesData = await responseTimesResponse.json(); 47 | setResponseTimes(responseTimesData); 48 | 49 | const cacheSizeResponse = await fetch('/cacheSize'); 50 | const cacheSizeData = await cacheSizeResponse.json(); 51 | setCacheSize(cacheSizeData); 52 | } catch (error) { 53 | console.error('Error fetching Chart Data:', error); 54 | } 55 | }; 56 | 57 | // handleInputChange function 58 | const handleInputChange = (event) => 
{ 59 | setQuery(event.target.value); 60 | }; 61 | 62 | // handleSubmit function 63 | const handleSubmit = async (event) => { 64 | event.preventDefault(); // Prevent the default form submission 65 | setError(''); // Clear any previous errors 66 | 67 | const endpoint = cacheSwitch ? '/data/cache/dynamic-select' : '/data/no-cache/dynamic-select'; 68 | 69 | try { 70 | const response = await fetch(endpoint, { // Fetch the data from the endpoint 71 | method: 'POST', 72 | headers: { 73 | 'Content-Type': 'application/json', // Specify the content type 74 | }, 75 | body: JSON.stringify({ query }), // Send the query as JSON 76 | }); 77 | 78 | if (!response.ok) { 79 | throw new Error('Network response was not ok'); 80 | } 81 | 82 | const data = await response.json(); // Parse the response as JSON 83 | setResult(data); // Set the result state 84 | } catch (err) { 85 | setError('Error executing query'); 86 | } 87 | 88 | // Fetch Chart Data after executing the query 89 | fetchChartData(); 90 | }; 91 | 92 | return ( 93 | 94 | 95 | Custom Select Query 96 | 97 | 98 | 109 | 110 | 113 | 114 | 115 | {error && {error}} 116 | {result && ( 117 | 118 | Query Result 119 |
132 |             {JSON.stringify(result, null, 2)}
133 |           
134 |
135 | )} 136 |
137 | ); 138 | }; 139 | 140 | 141 | export default CustomSelectQuery; 142 | -------------------------------------------------------------------------------- /client/src/components/FrequencyDistribution.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import Plot from 'react-plotly.js'; 3 | import { Box, Typography, useTheme } from '@mui/material'; 4 | 5 | const calculateBins = (data, binSize) => { 6 | const min = Math.min(...data); 7 | const max = Math.max(...data); 8 | const bins = []; 9 | 10 | for (let i = min; i <= max; i += binSize) { 11 | bins.push(i); 12 | } 13 | 14 | return bins; 15 | }; 16 | 17 | const FrequencyDistribution = ({ cacheData, noCacheData, binSize = 0.1 }) => { 18 | const theme = useTheme(); // Use the theme hook 19 | const combinedData = [...cacheData.values, ...noCacheData.values]; 20 | const bins = calculateBins(combinedData, binSize); 21 | 22 | return ( 23 | 24 | 25 | This histogram represents the frequency distribution of response times for cache and no-cache data. 
26 | 27 | 84 | 85 | ); 86 | }; 87 | 88 | export default FrequencyDistribution; 89 | 90 | -------------------------------------------------------------------------------- /client/src/components/FrequencyDistribution2.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Box, Container, Typography } from '@mui/material'; 3 | import { BarChart, Bar, XAxis, YAxis, Tooltip, CartesianGrid, Legend, ResponsiveContainer, Label } from 'recharts'; 4 | 5 | const FrequencyDistribution = ({ cacheData, noCacheData }) => { 6 | const formatData = (data) => { 7 | return data.labels.map((label, index) => ({ 8 | label: label, 9 | cacheValue: data.values[index], 10 | noCacheValue: noCacheData.values[index] || 0, 11 | })); 12 | }; 13 | 14 | const data = formatData(cacheData); 15 | 16 | // Custom tick formatter function to round the labels 17 | const tickFormatter = (tick) => { 18 | return Math.round(tick); 19 | }; 20 | 21 | return ( 22 | 23 | 24 | This histogram represents the frequency distribution of response times for cache and no-cache data. 25 | 26 | 27 | 28 | 29 | 30 | 32 | 33 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | ); 43 | }; 44 | 45 | export default FrequencyDistribution; 46 | -------------------------------------------------------------------------------- /client/src/components/Header.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { AppBar, Toolbar, Typography, Box } from '@mui/material'; 3 | 4 | function Header() { 5 | return ( 6 | 7 | 8 | 9 | logo 10 | 11 | 12 | CacheMunk 13 | 14 | 15 | Performance-Optimized Caching Middleware for Node.js: Efficiently abstracts Redis to 16 | cache RDBMS query results, enhancing Node.js applications with sub-millisecond 17 | latency. Designed for high-demand production environments. 
18 | 19 | 20 | 21 | 22 | 23 | ); 24 | } 25 | 26 | export default Header; 27 | -------------------------------------------------------------------------------- /client/src/components/QueryBox.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react'; 2 | import Box from '@mui/material/Box'; 3 | import InputLabel from '@mui/material/InputLabel'; 4 | import MenuItem from '@mui/material/MenuItem'; 5 | import FormControl from '@mui/material/FormControl'; 6 | import Select, { SelectChangeEvent } from '@mui/material/Select'; 7 | 8 | interface QueryBoxProps { 9 | querySelect: string; 10 | setQuerySelect: (newString: string) => void; 11 | } 12 | 13 | const BasicSelect: React.FC = ({ querySelect, setQuerySelect }) => { 14 | const handleChange = (event: SelectChangeEvent) => { 15 | setQuerySelect(event.target.value as string); 16 | }; 17 | 18 | return ( 19 | 20 | 21 | Query Type 22 | 33 | 34 | 35 | ); 36 | }; 37 | 38 | export default BasicSelect; 39 | -------------------------------------------------------------------------------- /client/src/components/QueryResultBox.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Box, Typography } from '@mui/material'; 3 | 4 | interface QueryResultProps { 5 | queryResult: Record | null; // Specify a more concrete type 6 | } 7 | 8 | const QueryResultBox: React.FC = ({ queryResult }) => { 9 | // Debugging: Log the queryresult to see what it contains at runtime 10 | console.log('Query Result:', queryResult); 11 | 12 | return ( 13 | 25 | 26 | Query Result 27 | 28 | 41 |
51 |           
52 |             {queryResult ? JSON.stringify(queryResult, null, 2) : 'No data available'}
53 |           
54 |         
55 |
56 |
57 | ); 58 | }; 59 | 60 | export default QueryResultBox; 61 | 62 | -------------------------------------------------------------------------------- /client/src/components/ResponseTimeChart.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Bar } from 'react-chartjs-2'; 3 | import { Box, Typography } from '@mui/material'; 4 | import 'chart.js/auto'; 5 | 6 | interface ResponseTimeChartProps { 7 | responseTimes: number[]; // Array of response times in milliseconds 8 | } 9 | 10 | const ResponseTimeChart: React.FC = ({ responseTimes }) => { 11 | const data = { 12 | labels: responseTimes.map((_, index) => `Request ${index + 1}`), 13 | datasets: [ 14 | { 15 | label: 'Response Time (ms)', 16 | data: responseTimes, 17 | backgroundColor: 'rgba(75, 192, 192, 0.4)', 18 | borderColor: 'rgba(75, 192, 192, 1)', 19 | borderWidth: 1, 20 | }, 21 | ], 22 | }; 23 | 24 | const options = { 25 | responsive: true, 26 | maintainAspectRatio: false, 27 | scales: { 28 | y: { 29 | beginAtZero: true, 30 | }, 31 | }, 32 | plugins: { 33 | legend: { 34 | display: true, 35 | }, 36 | }, 37 | }; 38 | 39 | return ( 40 | 52 | 53 | Response Times for /cache Endpoint 54 | 55 | 64 | 65 | 66 | 67 | ); 68 | 69 | }; 70 | 71 | export default ResponseTimeChart; 72 | -------------------------------------------------------------------------------- /client/src/components/SubmitButton.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect } from 'react'; 2 | import { Button } from '@mui/material'; 3 | 4 | interface SubmitButtonProps { 5 | cacheSwitch: boolean; 6 | querySelect: string; 7 | label?: string; 8 | onClick?: () => void; 9 | disabled?: boolean; 10 | cacheHits: number; 11 | cacheMisses: number; 12 | responseTimes: number[]; 13 | queryResult: any; 14 | cacheSize: number; 15 | cacheStatus: string; 16 | setCacheHits: React.Dispatch>; 17 | setCacheMisses: 
React.Dispatch>; 18 | setResponseTimes: React.Dispatch>; 19 | setQueryResult: React.Dispatch>; 20 | setCacheSize: React.Dispatch>; 21 | setCacheStatus: React.Dispatch>; 22 | } 23 | 24 | const SubmitButton: React.FC = ({ 25 | cacheSwitch, 26 | querySelect, 27 | cacheHits, 28 | cacheMisses, 29 | responseTimes, 30 | cacheStatus, 31 | queryResult, 32 | cacheSize, 33 | setCacheHits, 34 | setCacheMisses, 35 | setResponseTimes, 36 | setQueryResult, 37 | setCacheSize, 38 | setCacheStatus, 39 | label = 'Submit', 40 | onClick, 41 | disabled = false, 42 | }) => { 43 | // const showNotification = () => { 44 | // if (Notification.permission === 'granted') { 45 | // new Notification('Button was clicked'); 46 | // } else if (Notification.permission !== 'denied') { 47 | // Notification.requestPermission().then((permission) => { 48 | // if (permission === 'granted') { 49 | // new Notification('Button was clicked'); 50 | // } 51 | // }); 52 | // } 53 | // }; 54 | 55 | const fetchChartData = async () => { 56 | try { 57 | const cacheHitMissReponse = await fetch('/cache-analytics'); // fetches from endpoint 58 | const cacheHitMissData = await cacheHitMissReponse.json(); // converts to Javascript object 59 | setCacheHits(cacheHitMissData.cacheHits); // uses key to retrieve value and set state 60 | setCacheMisses(cacheHitMissData.cacheMisses); 61 | console.log("CacheHitMissData.status: ", cacheHitMissData.status) 62 | setCacheStatus(cacheHitMissData.status); 63 | 64 | 65 | const responseTimesResponse = await fetch('/cache-response-times'); 66 | const responseTimesData = await responseTimesResponse.json(); 67 | setResponseTimes(responseTimesData); 68 | 69 | const cacheSizeResponse = await fetch('/cacheSize'); 70 | const cacheSizeData = await cacheSizeResponse.json(); 71 | setCacheSize(cacheSizeData); 72 | 73 | } catch (error) { 74 | console.error('Error fetching Chart Data:', error); 75 | } 76 | }; 77 | 78 | const handleClick = async () => { 79 | console.log('Cache Switch:', cacheSwitch); 
80 | console.log('Query Select:', querySelect); 81 | // showNotification(); 82 | 83 | let endpoint = ''; // initialize endpoint to empty string 84 | let method: 'GET' | 'POST' = 'GET'; // default method will be POST 85 | 86 | switch (querySelect) { 87 | case 'insert': 88 | endpoint = '/data/cities'; 89 | method = 'POST'; 90 | break; 91 | case 'select': 92 | endpoint = cacheSwitch ? '/data/cache' : '/data/no-cache'; 93 | break; 94 | case 'costly': 95 | endpoint = cacheSwitch ? '/data/cache/costly' : '/data/no-cache/costly'; 96 | break; 97 | default: 98 | // Handle unexpected querySelect values if necessary 99 | break; 100 | } 101 | 102 | try { 103 | const response = await fetch(endpoint, { 104 | method: method, 105 | headers: { 106 | 'Content-Type': 'application/json', 107 | }, 108 | }); 109 | const data = await response.json(); 110 | console.log('Response:', data); 111 | setQueryResult(data); 112 | } catch (error) { 113 | console.error('Error fetching data:', error); 114 | } 115 | 116 | // fetch Chart Data 117 | fetchChartData(); 118 | 119 | if (onClick) { 120 | onClick(); 121 | } 122 | }; 123 | 124 | return ( 125 | 128 | ); 129 | }; 130 | 131 | export default SubmitButton; 132 | -------------------------------------------------------------------------------- /client/src/components/SummaryBarChart.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { BarChart, Bar, XAxis, YAxis, CartesianGrid, Tooltip, Legend, ResponsiveContainer } from 'recharts'; 3 | import { Box } from '@mui/material'; 4 | 5 | const SummaryBarChart = ({ cacheData, noCacheData }) => { 6 | const data = [ 7 | { name: 'Min', Cache: parseFloat(cacheData.min?.toFixed(3) || 0), 'No Cache': parseFloat(noCacheData.min?.toFixed(3) || 0) }, 8 | { name: 'Max', Cache: parseFloat(cacheData.max?.toFixed(3) || 0), 'No Cache': parseFloat(noCacheData.max?.toFixed(3) || 0) }, 9 | { name: 'Mean', Cache: parseFloat(cacheData.mean?.toFixed(3) 
|| 0), 'No Cache': parseFloat(noCacheData.mean?.toFixed(3) || 0) }, 10 | { name: 'Stdev', Cache: parseFloat(cacheData.stddev?.toFixed(3) || 0), 'No Cache': parseFloat(noCacheData.stddev?.toFixed(3) || 0) }, 11 | { name: 'P50', Cache: parseFloat(cacheData.p50?.toFixed(3) || 0), 'No Cache': parseFloat(noCacheData.p50?.toFixed(3) || 0) }, 12 | { name: 'P95', Cache: parseFloat(cacheData.p95?.toFixed(3) || 0), 'No Cache': parseFloat(noCacheData.p95?.toFixed(3) || 0) }, 13 | { name: 'P99', Cache: parseFloat(cacheData.p99?.toFixed(3) || 0), 'No Cache': parseFloat(noCacheData.p99?.toFixed(3) || 0) }, 14 | ]; 15 | 16 | 17 | return ( 18 | 19 | 20 | 21 | 22 | 23 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | ); 34 | }; 35 | 36 | export default SummaryBarChart; 37 | 38 | 39 | -------------------------------------------------------------------------------- /client/src/components/SummaryContainer.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { Box, Typography } from '@mui/material'; 3 | import { LoadingButton } from '@mui/lab'; 4 | import SummaryBarChart from './SummaryBarChart'; 5 | import FrequencyDistribution from './FrequencyDistribution'; 6 | import cacheDataJson from '../data/responseTimes-cache.json'; 7 | import noCacheDataJson from '../data/responseTimes-no-cache.json'; 8 | 9 | const fullBenchmarkDataCache: object = cacheDataJson; 10 | const fullBenchmarkDataNoCache: object = noCacheDataJson; 11 | 12 | const SummaryContainer: React.FC = () => { 13 | const [cacheData, setCacheData] = useState(null); 14 | const [noCacheData, setNoCacheData] = useState(null); 15 | const [loading, setLoading] = useState(false); 16 | 17 | const fetchCacheData = async () => { 18 | const requestBody = { 19 | clients: 5, // clients running in parallel 20 | requests: 500, // requests per client 21 | queryKey: 'SELECT_CITIES_COSTLY', 22 | 'Cache-Control': null 23 | }; 24 | 25 | try { 26 | const response = 
await fetch('/benchmark', { 27 | method: 'POST', 28 | headers: { 29 | 'Content-Type': 'application/json' 30 | }, 31 | body: JSON.stringify(requestBody) 32 | }); 33 | 34 | if (!response.ok) { 35 | throw new Error('Network response was not ok'); 36 | } 37 | 38 | const result = await response.json(); 39 | setCacheData(result); 40 | } catch (error) { 41 | console.error('Fetch cache data failed, using fallback data', error); 42 | setCacheData(fullBenchmarkDataCache); 43 | } 44 | }; 45 | 46 | 47 | const fetchNoCacheData = async () => { 48 | const requestBody = { 49 | clients: 5, // clients running in parallel 50 | requests: 500, // requests per client 51 | queryKey: 'SELECT_CITIES_COSTLY', 52 | 'Cache-Control': 'no-cache' 53 | }; 54 | 55 | try { 56 | const response = await fetch('/benchmark', { 57 | method: 'POST', 58 | headers: { 59 | 'Content-Type': 'application/json' 60 | }, 61 | body: JSON.stringify(requestBody) 62 | }); 63 | 64 | if (!response.ok) { 65 | throw new Error('Network response was not ok'); 66 | } 67 | 68 | const result = await response.json(); 69 | setNoCacheData(result); 70 | } catch (error) { 71 | console.error('Fetch no cache data failed, using fallback data', error); 72 | setNoCacheData(fullBenchmarkDataNoCache); 73 | console.log('noCacheData', noCacheData); 74 | } 75 | }; 76 | 77 | 78 | const fetchData = async () => { 79 | setLoading(true); 80 | 81 | try { 82 | await fetchCacheData(); 83 | await fetchNoCacheData(); 84 | } catch (error) { 85 | console.error('Error fetching data:', error); 86 | } finally { 87 | setLoading(false); 88 | } 89 | }; 90 | 91 | return ( 92 | 93 | 99 | Run Benchmark Test 100 | 101 | {cacheData && noCacheData && ( 102 | <> 103 | 104 | Summary Statistics 105 | 106 | 107 | 108 | Frequency Distribution 109 | 110 | 111 | 112 | )} 113 | 114 | ); 115 | }; 116 | 117 | export default SummaryContainer; 118 | 119 | -------------------------------------------------------------------------------- /client/src/index.html: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | CacheMunk 7 | 8 | 9 |
10 | 11 | 12 | -------------------------------------------------------------------------------- /client/src/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom/client'; 3 | import App from './App'; 4 | import './stylesheets/styles.css'; 5 | 6 | const rootElement = document.getElementById('root'); 7 | if (rootElement) { 8 | const root = ReactDOM.createRoot(rootElement); 9 | root.render( 10 | 11 | 12 | , 13 | ); 14 | } 15 | -------------------------------------------------------------------------------- /client/src/stylesheets/styles.css: -------------------------------------------------------------------------------- 1 | .button { 2 | background-color: #ff5722 !important; /* Using !important to ensure override */ 3 | color: white !important; 4 | border: none !important; 5 | padding: 10px 20px !important; 6 | font-size: 16px !important; 7 | border-radius: 8px !important; 8 | box-shadow: 0 4px 8px rgba(0, 0, 0, 0.2) !important; 9 | transition: background-color 0.3s ease-in-out !important; 10 | } 11 | 12 | .frame .image { 13 | height: 128px; 14 | left: 198px; 15 | object-fit: cover; 16 | position: absolute; 17 | top: 124px; 18 | width: 135px; 19 | } 20 | 21 | header { 22 | text-align: center; 23 | padding: 20px; 24 | } 25 | 26 | .main-content { 27 | display: flex; 28 | justify-content: space-around; 29 | align-items: flex-start; 30 | padding: 20px; 31 | } 32 | 33 | .section-container { 34 | width: 45%; /* Adjust the width as needed */ 35 | text-align: center; 36 | padding: 20px; 37 | border: 2px solid #000; /* Adding a black border outline */ 38 | box-shadow: 0px 4px 8px rgba(0, 0, 0, 0.1); /* Adding a subtle shadow for better visibility */ 39 | } 40 | 41 | .doughnut-chart-container { 42 | width: 100%; 43 | max-width: 500px; 44 | margin: 0 auto; /* Center the chart horizontally */ 45 | } 46 | 47 | .bar-chart-container { 48 | width: 100%; 49 | height: 500px; 50 | 
max-width: 500px; 51 | margin: 0 auto; /* Center the chart horizontally */ 52 | } 53 | 54 | button { 55 | display: block; 56 | margin: 10px auto; /* Center the button horizontally */ 57 | } 58 | -------------------------------------------------------------------------------- /client/src/theme.ts: -------------------------------------------------------------------------------- 1 | import { createTheme } from '@mui/material/styles'; 2 | import { indigo, deepPurple } from '@mui/material/colors'; 3 | 4 | const theme = createTheme({ 5 | palette: { 6 | mode: 'dark', // Enable dark mode 7 | primary: { 8 | main: deepPurple[500], 9 | }, 10 | secondary: { 11 | main: indigo[500], 12 | }, 13 | background: { 14 | default: '#031033', // Dark background for the entire application 15 | paper: '#041a4d', // Dark background for components such as cards, sheets 16 | }, 17 | text: { 18 | primary: '#ffffff', // Light color text for better readability on dark backgrounds 19 | secondary: '#f5edf7', // Slightly muted text color for secondary text 20 | }, 21 | }, 22 | }); 23 | 24 | export default theme; 25 | -------------------------------------------------------------------------------- /client/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "include": ["src"], 3 | "compilerOptions": { 4 | "strict": true, 5 | "jsx": "react-jsx", 6 | "target": "es2022", 7 | "module": "esnext", 8 | "moduleResolution": "bundler", 9 | "lib": ["DOM", "ES2022"], 10 | "outDir": "./dist" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /client/webpack.config.js: -------------------------------------------------------------------------------- 1 | import path from 'node:path'; 2 | import process from 'node:process'; 3 | import { fileURLToPath } from 'node:url'; 4 | import HtmlWebpackPlugin from 'html-webpack-plugin'; 5 | import MiniCssExtractPlugin from 'mini-css-extract-plugin'; 6 | 7 | const __filename = 
fileURLToPath(import.meta.url); 8 | const __dirname = path.dirname(__filename); 9 | 10 | export default { 11 | mode: process.env.NODE_ENV, 12 | entry: './client/src/index.tsx', 13 | output: { 14 | filename: 'bundle.js', 15 | path: path.resolve(__dirname, 'dist'), 16 | }, 17 | module: { 18 | rules: [ 19 | { 20 | test: /\.(js|jsx|ts|tsx)$/, 21 | exclude: /node_modules/, 22 | use: { 23 | loader: 'babel-loader', 24 | options: { 25 | presets: [ 26 | ['@babel/preset-env', { targets: 'defaults' }], 27 | ['@babel/preset-react', { runtime: 'automatic' }], 28 | '@babel/preset-typescript', 29 | ], 30 | }, 31 | }, 32 | }, 33 | { 34 | test: /\.css$/i, 35 | use: [MiniCssExtractPlugin.loader, 'css-loader'], 36 | }, 37 | ], 38 | }, 39 | resolve: { 40 | extensions: ['.js', '.jsx', '.ts', '.tsx'], 41 | }, 42 | plugins: [ 43 | new HtmlWebpackPlugin({ 44 | template: './client/src/index.html', // Path to your index.html 45 | filename: 'index.html', 46 | }), 47 | new MiniCssExtractPlugin({ 48 | filename: 'bundle.css', 49 | }), 50 | ], 51 | devServer: { 52 | static: { 53 | directory: path.resolve(__dirname, 'public'), 54 | publicPath: '/', 55 | }, 56 | proxy: [ 57 | { 58 | context: ['/data', '/test', '/cache-analytics', '/cache-response-times', '/deleteCache', '/cacheSize', '/benchmark'], 59 | target: 'http://localhost:3030', 60 | }, 61 | ], 62 | }, 63 | }; 64 | -------------------------------------------------------------------------------- /compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redis: 3 | image: redis:alpine 4 | container_name: redis-alpine 5 | ports: 6 | - "6379:6379" 7 | postgres: 8 | image: postgres:alpine 9 | container_name: postgres-alpine 10 | environment: 11 | POSTGRES_DB: cities 12 | POSTGRES_USER: postgres 13 | POSTGRES_PASSWORD: mypassword 14 | ports: 15 | - "5432:5432" 16 | volumes: 17 | - ./psql-cities-data/world.sql:/docker-entrypoint-initdb.d/world.sql 18 | 
-------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import js from '@eslint/js'; 2 | import tseslint from 'typescript-eslint'; 3 | import react from 'eslint-plugin-react'; 4 | import reactRecommended from 'eslint-plugin-react/configs/recommended.js'; 5 | import globals from 'globals'; 6 | const { node, browser, mocha } = globals; 7 | 8 | export default [ 9 | js.configs.recommended, 10 | ...tseslint.configs.strictTypeChecked, 11 | ...tseslint.configs.stylisticTypeChecked, 12 | { 13 | languageOptions: { 14 | parserOptions: { 15 | project: true, 16 | tsconfigRootDir: import.meta.dirname, 17 | }, 18 | }, 19 | }, 20 | { 21 | files: ['**/*.js'], 22 | ...tseslint.configs.disableTypeChecked, 23 | }, 24 | { 25 | rules: { 26 | 'require-atomic-updates': 'error', 27 | 'arrow-body-style': ['error', 'as-needed'], 28 | eqeqeq: 'error', 29 | 'no-console': 'warn', 30 | 'prefer-const': 'error', 31 | }, 32 | }, 33 | { 34 | files: ['server/**/*.{js,ts}'], 35 | languageOptions: { 36 | globals: { 37 | ...node, 38 | }, 39 | }, 40 | rules: { 41 | '@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }], 42 | }, 43 | }, 44 | { 45 | files: ['test/**/*.{js,ts}'], 46 | languageOptions: { 47 | globals: { 48 | ...node, 49 | ...mocha, 50 | }, 51 | }, 52 | }, 53 | { 54 | files: ['client/**/*.{js,jsx,ts,tsx}'], 55 | ...reactRecommended, 56 | plugins: { 57 | react, 58 | }, 59 | languageOptions: { 60 | ...reactRecommended.languageOptions, 61 | parserOptions: { 62 | ecmaFeatures: { 63 | jsx: true, 64 | }, 65 | }, 66 | globals: { 67 | ...browser, 68 | }, 69 | }, 70 | settings: { 71 | react: { 72 | version: 'detect', 73 | }, 74 | }, 75 | }, 76 | { 77 | files: ['client/**/*.{js,jsx,ts,tsx}'], 78 | rules: { 79 | 'react/react-in-jsx-scope': 'off', 80 | 'react/jsx-uses-react': 'off', 81 | 'react/prop-types': 'off', 82 | '@typescript-eslint/no-unused-vars': 
[ 83 | 'error', 84 | { argsIgnorePattern: '^_', destructuredArrayIgnorePattern: '^_' }, 85 | ], 86 | }, 87 | }, 88 | { 89 | ignores: ['client/dist', 'server/dist/', 'test/dist'], 90 | }, 91 | ]; 92 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cachemunk", 3 | "version": "1.0.0", 4 | "description": "", 5 | "type": "module", 6 | "main": "index.js", 7 | "scripts": { 8 | "start": "node server/dist/server.js", 9 | "server": "nodemon server/dist/server.js", 10 | "client": "NODE_ENV=development webpack serve --config client/webpack.config.js --open", 11 | "build:client": "NODE_ENV=production webpack --config client/webpack.config.js", 12 | "build:server": "tsc -p server/tsconfig.json", 13 | "build:server:watch": "tsc -p server/tsconfig.json --watch", 14 | "lint": "eslint .", 15 | "test": "tsc -p test/tsconfig.json && mocha --recursive" 16 | }, 17 | "author": "", 18 | "license": "MIT", 19 | "dependencies": { 20 | "@emotion/react": "^11.11.4", 21 | "@emotion/styled": "^11.11.5", 22 | "@mui/icons-material": "^5.15.18", 23 | "@mui/lab": "^5.0.0-alpha.170", 24 | "@mui/material": "^5.15.19", 25 | "@mui/x-charts": "^7.6.0", 26 | "chart.js": "^4.4.3", 27 | "cors": "^2.8.5", 28 | "dotenv": "^16.4.5", 29 | "express": "^4.19.2", 30 | "ioredis": "^5.4.1", 31 | "nodemon": "^3.1.0", 32 | "pg": "^8.11.5", 33 | "plotly.js": "^2.33.0", 34 | "react": "^18.3.1", 35 | "react-chartjs-2": "^5.2.0", 36 | "react-dom": "^18.3.1", 37 | "react-plotly.js": "^2.6.0", 38 | "react-router-dom": "^6.23.1", 39 | "recharts": "^2.12.7", 40 | "snappy": "^7.2.2" 41 | }, 42 | "devDependencies": { 43 | "@babel/core": "^7.24.5", 44 | "@babel/preset-env": "^7.24.5", 45 | "@babel/preset-react": "^7.24.1", 46 | "@babel/preset-typescript": "^7.24.1", 47 | "@eslint/js": "^9.2.0", 48 | "@types/chai": "^4.3.16", 49 | "@types/cors": "^2.8.17", 50 | "@types/express": 
"^4.17.21", 51 | "@types/mocha": "^10.0.6", 52 | "@types/node": "^20.12.12", 53 | "@types/pg": "^8.11.6", 54 | "@types/react": "^18.3.2", 55 | "@types/react-dom": "^18.3.0", 56 | "babel-loader": "^9.1.3", 57 | "chai": "^5.1.1", 58 | "css-loader": "^7.1.1", 59 | "eslint": "8.57", 60 | "eslint-plugin-react": "^7.34.1", 61 | "html-webpack-plugin": "^5.6.0", 62 | "mini-css-extract-plugin": "^2.9.0", 63 | "mocha": "^10.4.0", 64 | "prettier": "^3.2.5", 65 | "ts-node": "^10.9.2", 66 | "typescript": "^5.4.5", 67 | "typescript-eslint": "^7.9.0", 68 | "webpack": "^5.91.0", 69 | "webpack-cli": "^5.1.4", 70 | "webpack-dev-server": "^5.0.4" 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /server/src/analytics.ts: -------------------------------------------------------------------------------- 1 | // Array to store response times 2 | let cacheResponseTimes: number[] = []; 3 | 4 | // Object to store cache hits and cache misses counters 5 | const cacheInfo = { 6 | cacheHits: 0, 7 | cacheMisses: 0, 8 | status: '', 9 | }; 10 | 11 | export const addResponse = (execTime: number): void => { 12 | cacheResponseTimes.push(execTime); 13 | }; 14 | 15 | export const resetResponse = (): void => { 16 | cacheResponseTimes = []; 17 | }; 18 | 19 | export const incrCacheHits = (): void => { 20 | cacheInfo.cacheHits++; 21 | cacheInfo.status = 'CACHE_HIT'; 22 | }; 23 | 24 | export const incrCacheMisses = (): void => { 25 | cacheInfo.cacheMisses++; 26 | cacheInfo.status = 'CACHE_MISS'; 27 | }; 28 | 29 | export const resetCache = (): void => { 30 | cacheInfo.cacheHits = 0; 31 | cacheInfo.cacheMisses = 0; 32 | cacheInfo.status = ''; 33 | }; 34 | 35 | export const getCacheInfo = () => cacheInfo; 36 | 37 | export const getCacheResponseTimes = (): number[] => cacheResponseTimes; 38 | -------------------------------------------------------------------------------- /server/src/benchmarks/benchmark.ts: 
-------------------------------------------------------------------------------- 1 | import { percentile, calculateStdev } from '../util/stats.js'; 2 | import { timingFunc } from '../util/timing.js'; 3 | 4 | // DEFINE TESTING PARAMETERS 5 | 6 | export const runTests = async ( 7 | clients: number, 8 | requests: number, 9 | queryKey: string, 10 | testFunc: () => Promise, 11 | ) => { 12 | const sendRequest = async () => timingFunc(testFunc); 13 | const delay = async (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); 14 | 15 | // function to run a client 16 | const runClient = async (): Promise => { 17 | const clientTimes: number[] = []; 18 | for (let i = 0; i < requests; i++) { 19 | const [, execTime] = await sendRequest(); 20 | clientTimes.push(execTime); 21 | } 22 | return clientTimes; 23 | }; 24 | 25 | // run clients in parallel 26 | const clientsArr: Promise[] = []; 27 | const start = performance.now(); 28 | for (let i = 0; i < clients; i++) { 29 | clientsArr.push(runClient()); 30 | await delay(5); 31 | } 32 | 33 | const res = await Promise.all(clientsArr); 34 | const end = performance.now(); 35 | const totalExecTime = end - start; 36 | 37 | const agg = res.reduce((prev, curr) => [...prev, ...curr]); 38 | 39 | agg.sort((a, b) => a - b); // sort in ascending order 40 | const min = agg[0]; 41 | const max = agg[agg.length - 1]; 42 | const n = agg.length; 43 | const sum = agg.reduce((acc, curr) => acc + curr); 44 | const avgRps = (n / totalExecTime) * 1000; 45 | const mean = sum / n; 46 | const stddev = calculateStdev(agg); 47 | const p50 = percentile(agg, 50); 48 | const p95 = percentile(agg, 95); 49 | const p99 = percentile(agg, 99); 50 | return { 51 | sum, 52 | totalExecTime, 53 | avgRps, 54 | min, 55 | max, 56 | mean, 57 | stddev, 58 | n, 59 | p50, 60 | p95, 61 | p99, 62 | values: agg, 63 | }; 64 | }; 65 | -------------------------------------------------------------------------------- /server/src/benchmarks/benchmarkWrite.ts: 
-------------------------------------------------------------------------------- 1 | import { getData } from '../controllers/cachingController.js'; 2 | import { percentile, calculateStdev, createHistogramBuckets } from '../util/stats.js'; 3 | import { timingFunc } from '../util/timing.js'; 4 | import { writeFile } from 'fs'; 5 | 6 | // DEFINE TESTING PARAMETERS 7 | const clients = 10; // number of simultaneous clients 8 | const requests = 500; // requests per client 9 | 10 | // Function to perform the benchmarking 11 | const runBenchmark = async (testFunc: () => Promise, fileName: string) => { 12 | const sendRequest = async () => timingFunc(testFunc); 13 | 14 | // function to run a client 15 | const runClient = async (): Promise => { 16 | const clientTimes: number[] = []; 17 | for (let i = 0; i < requests; i++) { 18 | const [, execTime] = await sendRequest(); 19 | clientTimes.push(execTime); 20 | } 21 | return clientTimes; 22 | }; 23 | 24 | // Start benchmark 25 | console.log('Starting tests'); 26 | 27 | // run clients in parallel 28 | const clientsArr: Promise[] = []; 29 | const start = performance.now(); 30 | for (let i = 0; i < clients; i++) { 31 | clientsArr.push(runClient()); 32 | } 33 | 34 | const res = await Promise.all(clientsArr); 35 | const end = performance.now(); 36 | const totalExecTime = end - start; 37 | 38 | const agg = res.reduce((prev, curr) => [...prev, ...curr]); 39 | 40 | agg.sort((a, b) => a - b); // sort in ascending order 41 | const min = agg[0]; 42 | const max = agg[agg.length - 1]; 43 | const n = agg.length; 44 | const avgRps = (n / totalExecTime) * 1000; 45 | const sum = agg.reduce((acc, curr) => acc + curr); 46 | const mean = sum / n; 47 | const stddev = calculateStdev(agg); 48 | const p50 = percentile(agg, 50); 49 | const p95 = percentile(agg, 95); 50 | const p99 = percentile(agg, 99); 51 | const [buckets, bucketLabels] = createHistogramBuckets(agg); 52 | console.log({ 53 | totalExecTime, 54 | avgRps, 55 | min, 56 | max, 57 | mean, 58 | 
stddev, 59 | n, 60 | p50, 61 | p95, 62 | p99, 63 | buckets, 64 | bucketLabels, 65 | }); 66 | 67 | const summaryStatistics: object = { 68 | totalExecTime, 69 | avgRps, 70 | min, 71 | max, 72 | mean, 73 | stddev, 74 | n, 75 | p50, 76 | p95, 77 | p99, 78 | buckets, 79 | bucketLabels, 80 | responseTimes: agg 81 | }; 82 | 83 | // Write the aggregated times to a JSON file 84 | writeFile(fileName, JSON.stringify(summaryStatistics, null, 2), err => { 85 | if (err) { 86 | console.error(`Failed to save response times to ${fileName}:`, err); 87 | } else { 88 | console.log(`Response times saved successfully to ${fileName}`); 89 | } 90 | }); 91 | }; 92 | 93 | // Run the benchmark with cache 94 | await runBenchmark(async () => getData('SELECT_CITIES_COSTLY'), 'responseTimes-cache.json'); 95 | 96 | // Run the benchmark without cache 97 | await runBenchmark(async () => getData('SELECT_CITIES_COSTLY', "no-cache"), 'responseTimes-no-cache.json'); 98 | -------------------------------------------------------------------------------- /server/src/benchmarks/mock/data.ts: -------------------------------------------------------------------------------- 1 | // generates (array of objects) for testing 2 | // target size in KB (bytes * 1024) when stringified 3 | export const generateData = (targetSize: number, generator: () => object): object[] => { 4 | const calcByteLen = (input: object): number => Buffer.byteLength(JSON.stringify(input)); 5 | 6 | // check that generator function is pure wrt. size of output in bytes 7 | const byteLen = calcByteLen(generator()); 8 | if (byteLen !== calcByteLen(generator())) { 9 | throw new Error('Generator function must be pure wrt. 
output size in bytes'); 10 | } 11 | 12 | const arr: object[] = []; 13 | 14 | // calculate number of iterations required to reach target 15 | const targetByteSize = targetSize * 1024; // convert KB to bytes 16 | const iterations = Math.ceil((targetByteSize - 1) / (byteLen + 1)); 17 | 18 | for (let i = 0; i < iterations; i++) { 19 | const item = generator(); 20 | arr.push(item); 21 | } 22 | 23 | return arr; 24 | }; 25 | -------------------------------------------------------------------------------- /server/src/benchmarks/mock/generator.ts: -------------------------------------------------------------------------------- 1 | import { nanoid } from 'nanoid'; 2 | 3 | // generate semi-random data in object format 4 | export const generator = (): object => ({ 5 | someRandomData: nanoid(), 6 | someOtherData: nanoid(), 7 | someOtherKey: 'someOtherCommonValue', 8 | }); 9 | -------------------------------------------------------------------------------- /server/src/cache/cache.ts: -------------------------------------------------------------------------------- 1 | import type { Redis } from 'ioredis'; 2 | import { compress, uncompress } from 'snappy'; 3 | 4 | type EventHandler = (queryKey: string, executionTime: number) => void; 5 | 6 | interface Config { 7 | redis: Redis; 8 | defaultTtl?: number; 9 | maxEntrySize?: number; 10 | onCacheHit?: EventHandler; 11 | onCacheMiss?: EventHandler; 12 | } 13 | // write cache in the functional style (creator function) 14 | // instead of class (OOP) syntax for stronger encapsulation 15 | export const configureCache = (options: Config) => { 16 | const { redis } = options; 17 | 18 | const cacheL1 = new Map(); 19 | 20 | // set default ttl to 1 hour (3600 seconds) 21 | const defaultTtl = options.defaultTtl && options.defaultTtl > 0 ? options.defaultTtl : 3600; 22 | 23 | // set default maxEntrySize to 5MB (5_000_000 bytes) 24 | const maxEntrySize = 25 | options.maxEntrySize && options.maxEntrySize > 0 ? 
options.maxEntrySize : 5_000_000; 26 | 27 | const { onCacheHit, onCacheMiss } = options; 28 | 29 | const calcExecTime = (start: bigint, end: bigint) => { 30 | const diff = end - start; 31 | return Number(diff) / 1_000_000; // convert nanoseconds to milliseconds 32 | }; 33 | 34 | // Function to add a query result to the cache 35 | async function set( 36 | queryKey: string, 37 | data: string | Buffer, 38 | dependencies: string[], 39 | ttlInSeconds = defaultTtl, // default to 1 hour in seconds 40 | ): Promise { 41 | // Capture initial timestamp for performance monitoring 42 | // const start = process.hrtime.bigint(); 43 | 44 | // Convert data to binary Buffer if it is a string 45 | const binaryData = typeof data === 'string' ? Buffer.from(data) : data; 46 | 47 | // check if binary Data exceeds maxEntrySize 48 | if (binaryData.length > maxEntrySize) { 49 | throw new Error('maxEntrySize exceeded'); 50 | } 51 | 52 | // Compress buffer to save bandwidth using snappy. To further compress buffer. ex: 10kb ->3 kb 53 | const compressedData = await compress(binaryData); 54 | 55 | if (dependencies.length > 0) { 56 | // Create a pipeline/transaction (ensure data integrity and consistency. 
if one operation fails, they all fail)
(!cacheL1.has(queryKey)) { 117 | cacheL1.set(queryKey, data); 118 | setTimeout(() => cacheL1.delete(queryKey), 50); 119 | } 120 | 121 | // Capture final timestamp 122 | const end = process.hrtime.bigint(); 123 | 124 | if (onCacheHit) onCacheHit(queryKey, calcExecTime(start, end)); 125 | // console.log(`response from redis in ${calcExecTime(startReq, endReq).toFixed(3)}`); 126 | // console.log(`compressed data size ${compressedData.length / 1000} KB`); 127 | // console.log(`decompression in ${calcExecTime(startSnappy, endSnappy).toFixed(3)}`); 128 | // console.log(`cache hit in ${calcExecTime(start, end).toFixed(3)}`); 129 | return data; 130 | } 131 | 132 | // Function to invalidate cache based on table updates 133 | async function invalidate(dependency: string) { 134 | // const start = process.hrtime.bigint(); 135 | 136 | const dependencyKey = `dependency:${dependency}`; 137 | 138 | const queriesToInvalidate = await redis.smembers(dependencyKey); 139 | 140 | if (queriesToInvalidate.length > 0) { 141 | // Create a pipeline to batch multiple operations 142 | // const pipeline = redis.multi(); 143 | 144 | queriesToInvalidate.forEach((queryKey) => void redis.del(queryKey)); 145 | await redis.del(dependencyKey); 146 | 147 | // await pipeline.exec(); 148 | } else { 149 | // Clear the dependency set if it's the only key 150 | await redis.del(dependencyKey); 151 | } 152 | 153 | // const end = process.hrtime.bigint(); 154 | 155 | // console.log(`cache invalidate in ${calcExecTime(start, end).toFixed(3)}`); 156 | } 157 | 158 | // Function to clear the cache 159 | async function clear(): Promise { 160 | try { 161 | const result = await redis.flushall(); 162 | console.log('Cache for the current database cleared', result); 163 | } catch (err) { 164 | console.error('Error clearing cache:', err); 165 | } 166 | } 167 | 168 | async function getSize(): Promise { 169 | try { 170 | const size = await redis.dbsize(); 171 | return size; 172 | } catch (err) { 173 | console.error('Error 
getting cache size', err); 174 | return 0; 175 | } 176 | } 177 | 178 | async function getStringKeySize(): Promise { 179 | let cursor = '0'; 180 | let stringKeyCount = 0; 181 | 182 | try { 183 | do { 184 | const [newCursor, keys] = await redis.scan(cursor, 'COUNT', 100); 185 | 186 | cursor = newCursor; 187 | for (const key of keys) { 188 | const type = await redis.type(key); 189 | if (type === 'string') { 190 | stringKeyCount++; 191 | } 192 | } 193 | } while (cursor !== '0'); 194 | 195 | return stringKeyCount; 196 | } catch (err) { 197 | console.error('Error getting string key size', err); 198 | return 0; 199 | } 200 | } 201 | 202 | return { set, get, invalidate, clear, getSize, getStringKeySize }; 203 | }; 204 | 205 | export default configureCache; 206 | -------------------------------------------------------------------------------- /server/src/cache/redisClient.ts: -------------------------------------------------------------------------------- 1 | import 'dotenv/config'; 2 | import { Redis } from 'ioredis'; 3 | import { configureCache } from './cache.js'; 4 | 5 | export const redis = new Redis({ host: process.env.REDIS_HOST, port: 6379 }); 6 | 7 | redis.on('connect', () => { 8 | console.log('Connected to Redis'); 9 | }); 10 | 11 | redis.on('error', () => { 12 | console.log('Redis error'); 13 | // Implement your error handling logic here 14 | }); 15 | 16 | redis.on('reconnecting', (delay: number) => { 17 | console.log(`Reconnecting to Redis in ${delay.toString()}ms`); 18 | }); 19 | 20 | redis.on('end', () => { 21 | console.log('Disconnected from Redis'); 22 | // You can handle graceful shutdown or cleanup tasks here 23 | }); 24 | 25 | export const cache = configureCache({ 26 | redis, 27 | }); 28 | 29 | export default cache; 30 | -------------------------------------------------------------------------------- /server/src/controllers/cacheSize.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response, 
NextFunction } from 'express'; 2 | import cache from '../cache/redisClient.js'; 3 | import { asyncWrapper } from './errorHandling.js'; 4 | 5 | export const getCacheSize = asyncWrapper( 6 | async (req: Request, res: Response, next: NextFunction) => { 7 | const result = await cache.getSize(); 8 | res.locals.cacheSize = result 9 | next(); 10 | }, 11 | ); 12 | 13 | export const getStringKeySize = asyncWrapper( 14 | async (req: Request, res: Response, next: NextFunction) => { 15 | const result = await cache.getStringKeySize(); 16 | res.locals.cacheSize = result 17 | next(); 18 | }, 19 | ); -------------------------------------------------------------------------------- /server/src/controllers/cachingController.ts: -------------------------------------------------------------------------------- 1 | import { queriesMap, dependenciesMap } from '../queries/queriesMap.js'; 2 | import { query } from '../db.js'; 3 | import type pg from 'pg'; 4 | 5 | import { incrCacheHits, incrCacheMisses, addResponse } from '../analytics.js'; 6 | import cache from '../cache/redisClient.js'; 7 | 8 | export const getData = async ( 9 | queryKey: string, 10 | cacheControl?: string, 11 | log = true, 12 | ): Promise<{ query: string; rows: pg.QueryResultRow }> => { 13 | const calcExecTime = (start: bigint, end: bigint) => Number(end - start) / 1_000_000; 14 | 15 | const t0 = process.hrtime.bigint(); 16 | 17 | const queryText = queriesMap[queryKey]; 18 | 19 | if (cacheControl !== 'no-cache') { 20 | // check the cache 21 | const cachedResult = await cache.get(queryKey); 22 | 23 | if (cachedResult) { 24 | const t1 = process.hrtime.bigint(); 25 | if (log) { 26 | incrCacheHits(); // incr cache hit counter 27 | addResponse(calcExecTime(t0, t1)); // add execution time to array 28 | } 29 | return { 30 | query: queryText, 31 | rows: JSON.parse(cachedResult) as pg.QueryResultRow, 32 | }; 33 | } 34 | } 35 | 36 | // if the result is not cached, we need to query the DB 37 | const result = await query(queryText); 
38 | 39 | if (cacheControl !== 'no-cache') { 40 | // we cache the results from PostreSQL 41 | await cache.set(queryKey, JSON.stringify(result.rows), dependenciesMap[queryKey]); 42 | } 43 | 44 | const t1 = process.hrtime.bigint(); 45 | 46 | // Time from cache miss and time querying from database 47 | if (log) { 48 | addResponse(calcExecTime(t0, t1)); 49 | incrCacheMisses(); 50 | } 51 | 52 | return { query: queryText, rows: result.rows }; 53 | }; 54 | -------------------------------------------------------------------------------- /server/src/controllers/deleteCache.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response, NextFunction } from 'express'; 2 | import cache from '../cache/redisClient.js'; 3 | import { asyncWrapper } from './errorHandling.js'; 4 | import { resetResponse, resetCache } from '../analytics.js'; 5 | 6 | export const deleteCache = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 7 | await cache.clear(); 8 | resetResponse(); 9 | resetCache(); 10 | next(); 11 | }); 12 | -------------------------------------------------------------------------------- /server/src/controllers/dynamicController.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response, NextFunction } from 'express'; 2 | import { asyncWrapper } from './errorHandling.js'; 3 | import cache from '../cache/redisClient.js'; 4 | import { query } from '../db.js'; 5 | 6 | //Req is an object with custom name and state id - sending other values as well 7 | 8 | export const dynamicQuery = asyncWrapper( 9 | async (req: Request, res: Response, next: NextFunction) => { 10 | //need custom insert name 11 | //need custom values to insert 12 | // const req.body = { 13 | // "name": "", 14 | // "state_code": "ID", 15 | // "country_id": 233, 16 | // "country_code": "US", 17 | // "latitude": "38.842460", 18 | // "longitude": "-84.021296", 19 | // "flag": true, 20 | 
// "state_id": 1460 21 | // } 22 | 23 | const { name, state_code, country_id, country_code, latitude, longitude, flag, state_id } = 24 | req.body; 25 | 26 | const queryText = `INSERT INTO cities ( 27 | name, state_code, country_id, country_code, latitude, longitude, flag, state_id 28 | ) VALUES ( 29 | $1, $2, $3, $4, $5, $6, $7, $8 30 | ) 31 | `; 32 | 33 | const values = [ 34 | name, 35 | state_code, 36 | country_id, 37 | country_code, 38 | latitude, 39 | longitude, 40 | flag, 41 | state_id, 42 | ]; 43 | 44 | try { 45 | const result = await query(queryText, values); 46 | 47 | // invalidate cache here 48 | await cache.invalidate('cities'); 49 | 50 | console.log(result); 51 | } catch (err) { 52 | console.log(err); 53 | } 54 | 55 | //This is an update test for dev 56 | 57 | next(); 58 | }, 59 | ); 60 | -------------------------------------------------------------------------------- /server/src/controllers/errorHandling.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response, NextFunction } from 'express'; 2 | 3 | /** 4 | * Async middleware wrapper for Express. 5 | * Wraps an asynchronous function, catching any errors and passing them to the next middleware. 6 | * 7 | * @param fn The asynchronous middleware function. 8 | * @returns A function that takes Express's req, res, and next parameters. 
9 | */ 10 | export function asyncWrapper( 11 | fn: (req: Request, res: Response, next: NextFunction) => Promise, 12 | ) { 13 | return (req: Request, res: Response, next: NextFunction): void => { 14 | fn(req, res, next).catch((err: unknown) => { 15 | next(err); 16 | }); 17 | }; 18 | } 19 | -------------------------------------------------------------------------------- /server/src/controllers/insertCity.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response, NextFunction } from 'express'; 2 | import { asyncWrapper } from './errorHandling.js'; 3 | import cache from '../cache/redisClient.js'; 4 | import { query } from '../db.js'; 5 | 6 | export const insertCity = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 7 | const city = { 8 | name: 'Aalphabet City', 9 | state_id: 1452, 10 | state_code: 'NY', 11 | country_id: 233, 12 | country_code: 'US', 13 | latitude: '43.80923000', 14 | longitude: '-76.02409000', 15 | created_at: '2019-10-06T01:01:43.000Z', 16 | updated_at: '2019-10-06T01:01:43.000Z', 17 | flag: true, 18 | wikidataid: 'Q2417063', 19 | }; 20 | 21 | const queryText = ` 22 | INSERT INTO cities ( 23 | name, state_id, state_code, country_id, country_code, latitude, longitude, 24 | created_at, updated_at, flag, wikidataid 25 | ) VALUES ( 26 | $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11 27 | ) 28 | `; 29 | 30 | const values = [ 31 | city.name, 32 | city.state_id, 33 | city.state_code, 34 | city.country_id, 35 | city.country_code, 36 | city.latitude, 37 | city.longitude, 38 | city.created_at, 39 | city.updated_at, 40 | city.flag, 41 | city.wikidataid, 42 | ]; 43 | 44 | try { 45 | const result = await query(queryText, values); 46 | 47 | // invalidate cache here 48 | await cache.invalidate('cities'); 49 | 50 | console.log(result); 51 | } catch (err) { 52 | console.log(err); 53 | } 54 | 55 | next(); 56 | }); 57 | 
--------------------------------------------------------------------------------
/server/src/db.ts:
--------------------------------------------------------------------------------
import 'dotenv/config';
import pg from 'pg';
const { Pool } = pg;

// Destructure environment variables
const { PG_HOST, PG_PORT, PG_USER, PG_PASSWORD, PG_DB } = process.env;

// Validate required environment variables (warn only; the pool below still
// attempts to connect so the real connection error surfaces on startup).
const requiredEnvVars = ['PG_HOST', 'PG_PORT', 'PG_USER', 'PG_PASSWORD', 'PG_DB'];
requiredEnvVars.forEach((varName) => {
  if (!process.env[varName]) {
    console.log(`Missing required environment variable: ${varName}`);
  }
});

// Create a new Pool instance
export const pool = new Pool({
  user: PG_USER,
  host: PG_HOST,
  database: PG_DB,
  password: PG_PASSWORD,
  port: PG_PORT ? parseInt(PG_PORT) : 5432,
  ssl: {
    rejectUnauthorized: false,
  },
});

/**
 * Run a (optionally parameterized) query against the pool.
 *
 * @param text SQL text, with $1.. placeholders when params are supplied
 * @param params bind parameters for the placeholders
 * @throws rethrows any driver error after logging it. Previously the error
 *   was swallowed and `undefined` returned, which made every caller crash
 *   later on `result.rows` with an unrelated TypeError.
 */
export const query = async (
  text: string,
  params?: (string | number | boolean)[],
): Promise<pg.QueryResult> => {
  try {
    return await pool.query(text, params);
  } catch (err) {
    // eslint-disable-next-line no-console
    console.log(`Error executing query ${text.slice(0, 20)}:`, err);
    throw err;
  }
};
--------------------------------------------------------------------------------
/server/src/queries/queries.ts:
--------------------------------------------------------------------------------
// select cities and join with countries
export const SELECT_CITIES = `SELECT *
FROM cities
LIMIT 20`;

export const SELECT_CITIES_COSTLY = `SELECT cities.*,
countries.name as country_name,
states.name as state
FROM countries
INNER JOIN cities
ON cities.country_id = countries.id
INNER JOIN states
on
states.id = cities.state_id 14 | WHERE countries.name LIKE '%ted Stat%' 15 | AND states.name LIKE '%York%' 16 | ORDER BY name asc, latitude desc, longitude desc 17 | LIMIT 100;`; 18 | -------------------------------------------------------------------------------- /server/src/queries/queriesMap.ts: -------------------------------------------------------------------------------- 1 | import { SELECT_CITIES, SELECT_CITIES_COSTLY } from './queries.js'; 2 | 3 | export const queriesMap: Record = { 4 | SELECT_CITIES_COSTLY: SELECT_CITIES_COSTLY, 5 | SELECT_CITIES: SELECT_CITIES, 6 | }; 7 | 8 | export const dependenciesMap: Record = { 9 | SELECT_CITIES_COSTLY: ['cities', 'cities:new-york'], 10 | SELECT_CITIES: ['cities'], 11 | }; 12 | -------------------------------------------------------------------------------- /server/src/routers/cacheRouter.ts: -------------------------------------------------------------------------------- 1 | import { Router, Request, Response, NextFunction } from 'express'; 2 | import { asyncWrapper } from '../controllers/errorHandling.js'; 3 | import { getData } from '../controllers/cachingController.js'; 4 | import { queriesMap, dependenciesMap } from '../queries/queriesMap.js'; 5 | 6 | const router = Router(); 7 | 8 | const getCities = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 9 | const queryKey = 'SELECT_CITIES'; 10 | const result = await getData(queryKey); 11 | res.locals.data = result; 12 | next(); 13 | }); 14 | 15 | const getCitiesCostly = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 16 | const queryKey = 'SELECT_CITIES_COSTLY'; 17 | const result = await getData(queryKey); 18 | res.locals.data = result; 19 | next(); 20 | }); 21 | 22 | const getDynamicSelect = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 23 | const customQuery: string = (req.body as { query: string }).query; 24 | queriesMap[customQuery] = customQuery; 25 | const tableName: string | null 
= extractTableName(customQuery); 26 | if (tableName) { 27 | dependenciesMap[customQuery] = [tableName]; 28 | } else { 29 | dependenciesMap[customQuery] = []; 30 | } 31 | const result = await getData(customQuery); 32 | // console.log('result:', result); 33 | console.log('dependenciesMap:', dependenciesMap); 34 | res.locals.data = result; 35 | next(); 36 | }); 37 | 38 | function extractTableName(query: string): string | null { 39 | // Define the regex pattern to match the table name 40 | const regex = /from\s+([a-zA-Z_][a-zA-Z0-9_]*)/i; 41 | 42 | // Execute the regex on the query 43 | const match = query.match(regex); 44 | 45 | // If a match is found, return the table name, otherwise return null 46 | return match ? match[1] : null; 47 | } 48 | 49 | router.get('/', getCities, (req: Request, res: Response) => { 50 | res.json(res.locals.data); 51 | }); 52 | 53 | router.get('/costly', getCitiesCostly, (req: Request, res: Response) => { 54 | res.json(res.locals.data); 55 | }); 56 | 57 | //dynamic select query router 58 | //logic still needed for cached dynamic select 59 | router.post('/dynamic-select', getDynamicSelect, (req: Request, res: Response) => { 60 | // console.log('res.locals.data:', res.locals.data); 61 | res.json(res.locals.data); 62 | }); 63 | 64 | export default router; 65 | -------------------------------------------------------------------------------- /server/src/routers/dataRouter.ts: -------------------------------------------------------------------------------- 1 | import { Router, Request, Response } from 'express'; 2 | import cacheRouter from './cacheRouter.js'; 3 | import noCacheRouter from './no-cacheRouter.js'; 4 | import { insertCity } from '../controllers/insertCity.js'; 5 | import { dynamicQuery } from '../controllers/dynamicController.js'; 6 | 7 | const router = Router(); 8 | 9 | router.use('/cache', cacheRouter); 10 | 11 | router.use('/no-cache', noCacheRouter); 12 | 13 | router.post('/cities', insertCity, (req: Request, res: Response) => { 
14 | // invalidate the cache based on specified dependencies 15 | // make insert query to the SQL database 16 | res.status(201).json({ message: 'city inserted in db' }); 17 | }); 18 | 19 | router.post('/dynamic-insert', dynamicQuery, (req: Request, res: Response) => { 20 | // invalidate the cache 21 | // make insert query ot the SQL database 22 | res.status(201).json({ message: 'db insertion complete' }); 23 | }); 24 | 25 | export default router; 26 | -------------------------------------------------------------------------------- /server/src/routers/no-cacheRouter.ts: -------------------------------------------------------------------------------- 1 | import { Router, Request, Response, NextFunction } from 'express'; 2 | import { SELECT_CITIES, SELECT_CITIES_COSTLY } from '../queries/queries.js'; 3 | import { query } from '../db.js'; 4 | import { asyncWrapper } from '../controllers/errorHandling.js'; 5 | import { incrCacheMisses, addResponse } from '../analytics.js'; 6 | 7 | const router = Router(); 8 | 9 | const calcExecTime = (start: bigint, end: bigint) => Number(end - start) / 1_000_000; 10 | 11 | const getCities = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 12 | const queryText = SELECT_CITIES; 13 | const t0 = process.hrtime.bigint(); 14 | const result = await query(queryText); 15 | const t1 = process.hrtime.bigint(); 16 | 17 | addResponse(calcExecTime(t0, t1)); 18 | incrCacheMisses(); 19 | 20 | res.locals.data = { query: queryText, rows: result.rows }; 21 | next(); 22 | }); 23 | 24 | const getCitiesCostly = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 25 | const queryText = SELECT_CITIES_COSTLY; 26 | const t0 = process.hrtime.bigint(); 27 | const result = await query(queryText); 28 | const t1 = process.hrtime.bigint(); 29 | 30 | addResponse(calcExecTime(t0, t1)); 31 | incrCacheMisses(); 32 | 33 | res.locals.data = { query: queryText, rows: result.rows }; 34 | next(); 35 | }); 36 | 37 | const 
getDynamicSelect = asyncWrapper(async (req: Request, res: Response, next: NextFunction) => { 38 | const queryText = req.body.query; 39 | const t0 = process.hrtime.bigint(); 40 | const result = await query(queryText); 41 | const t1 = process.hrtime.bigint(); 42 | 43 | addResponse(calcExecTime(t0, t1)); 44 | incrCacheMisses(); 45 | 46 | res.locals.data = { query: queryText, rows: result.rows }; 47 | next(); 48 | }); 49 | 50 | router.get('/', getCities, (req: Request, res: Response) => { 51 | res.json(res.locals.data); 52 | }); 53 | 54 | router.get('/costly', getCitiesCostly, (req: Request, res: Response) => { 55 | res.json(res.locals.data); 56 | }); 57 | 58 | //dynamic select query router 59 | router.post('/dynamic-select', getDynamicSelect, (req: Request, res: Response) => { 60 | console.log('dynamic router'); 61 | res.json(res.locals.data); 62 | }); 63 | 64 | export default router; 65 | -------------------------------------------------------------------------------- /server/src/server.ts: -------------------------------------------------------------------------------- 1 | import express, { Request, Response, NextFunction } from 'express'; 2 | import path from 'node:path'; 3 | import { pool } from './db.js'; 4 | import { cache } from './cache/redisClient.js'; 5 | import { getData } from './controllers/cachingController.js'; 6 | import { runTests } from './benchmarks/benchmark.js'; 7 | import dataRouter from './routers/dataRouter.js'; 8 | import { getCacheInfo, getCacheResponseTimes } from './analytics.js'; 9 | import { getStringKeySize } from './controllers/cacheSize.js'; 10 | import { deleteCache } from './controllers/deleteCache.js'; 11 | 12 | const app = express(); 13 | 14 | // specify the port number to listen on 15 | const PORT = process.env.PORT ? 
parseInt(process.env.PORT) : 3030; 16 | 17 | // Function to test the database connection on startup 18 | void (async () => { 19 | try { 20 | const client = await pool.connect(); 21 | await client.query('SELECT NOW()'); 22 | // eslint-disable-next-line no-console 23 | console.log('Database connected successfully!!!'); 24 | client.release(); 25 | } catch (err) { 26 | // eslint-disable-next-line no-console 27 | console.log('Error connecting to the database:'); 28 | } 29 | })(); 30 | 31 | // Function to test the cache connection on startup 32 | void (async () => { 33 | try { 34 | await cache.set('ping', 'pong', []); 35 | 36 | const res = await cache.get('ping'); 37 | if (res === 'pong') { 38 | console.log('Successfully received pong from redis!'); 39 | console.log('version updated 10:57pm'); 40 | } 41 | } catch (err) { 42 | // eslint-disable-next-line no-console 43 | console.log('Error connecting to redis oops:'); 44 | } 45 | })(); 46 | 47 | // express middleware that parses JSON bodies 48 | app.use(express.json()); 49 | 50 | app.use(express.static(path.resolve('client/public'))); 51 | app.use(express.static(path.resolve('client/dist'))); 52 | 53 | app.get('/', (req, res) => { 54 | res.sendFile(path.resolve('client/dist/index.html')); 55 | }); 56 | 57 | app.use('/summary', (req, res) => { 58 | res.sendFile(path.resolve('client/dist/index.html')); 59 | }); 60 | 61 | // route for all database requests 62 | app.use('/data', dataRouter); 63 | 64 | // Endpoint to get cache-analytics 65 | app.get('/cache-analytics', (req, res) => { 66 | const cacheInfo = getCacheInfo(); 67 | res.status(200).json(cacheInfo); 68 | }); 69 | 70 | interface BenchmarkBody { 71 | clients: number; 72 | requests: number; 73 | queryKey: string; 74 | 'Cache-Control': string; 75 | } 76 | 77 | app.post('/benchmark', (req, res) => { 78 | const payload = req.body as BenchmarkBody; 79 | const { clients, requests, queryKey } = payload; 80 | const cacheControl = payload['Cache-Control']; 81 | 82 | let 
testFunc; 83 | if (cacheControl === 'no-cache') { 84 | // eslint-disable-next-line @typescript-eslint/require-await 85 | testFunc = async () => getData(queryKey, 'no-cache', false); 86 | } else { 87 | // eslint-disable-next-line @typescript-eslint/require-await 88 | testFunc = async () => getData(queryKey, '', false); 89 | } 90 | 91 | runTests(clients, requests, queryKey, testFunc) 92 | .then((data) => { 93 | res.json(data); 94 | }) 95 | .catch((err: unknown) => { 96 | console.log(err); 97 | res.status(500).json('unknown error'); 98 | }); 99 | }); 100 | 101 | // Endpoint to get response times for /cache (number[]) 102 | app.get('/cache-response-times', (req, res) => { 103 | console.log('Received request to /cache-response-times'); 104 | const cacheResponseTimes = getCacheResponseTimes(); 105 | res.status(200).json(cacheResponseTimes); 106 | }); 107 | 108 | // Endpoint to get the size of the redis cache 109 | app.get('/cacheSize', getStringKeySize, (req, res) => { 110 | res.status(200).json(res.locals.cacheSize); 111 | }); 112 | 113 | // End point to delete all redis cache 114 | app.get('/deleteCache', deleteCache, (req, res) => { 115 | res.status(200).send('cache, cachehit, cachemiss, and response time should all be deleted'); 116 | }); 117 | 118 | // 404 error handler 119 | app.use('*', (req, res) => { 120 | res.status(404).json('Page Not Found'); 121 | }); 122 | 123 | // Global error handler 124 | app.use( 125 | ( 126 | err: { log: string; status: number; message: string }, 127 | req: Request, 128 | res: Response, 129 | _next: NextFunction, 130 | ) => { 131 | console.log('global middlware error:', err); 132 | const defaultErr: object = { 133 | log: 'Express error handler caught unknown middleware error', 134 | status: 500, 135 | message: { err: 'An error occurred' }, 136 | }; 137 | 138 | const errorObj = Object.assign({}, defaultErr, err); 139 | res.status(errorObj.status).json(errorObj.message); 140 | }, 141 | ); 142 | 143 | app.listen(PORT, () => { 144 | 
console.log(`Listening on port ${PORT.toString()}`);
});
--------------------------------------------------------------------------------
/server/src/util/stats.ts:
--------------------------------------------------------------------------------
/**
 * Calculate the nth percentile of an array of numbers.
 * @param arr - The array of numbers.
 * @param percentile - The desired percentile (between 0 and 100).
 * @returns The nth percentile value (linear interpolation between ranks).
 */
export const percentile = (arr: number[], percentile: number): number => {
  if (percentile < 0 || percentile > 100) {
    throw new Error('Percentile must be between 0 and 100.');
  }
  if (arr.length === 0) {
    throw new Error('Array cannot be empty.');
  }

  // Sort the array in ascending order
  const sortedArr = arr.slice().sort((a, b) => a - b);

  // Calculate the rank (position) of the percentile
  const rank = (percentile / 100) * (sortedArr.length - 1);

  // Determine the indices for interpolation
  const lowerIndex = Math.floor(rank);
  const upperIndex = Math.ceil(rank);

  // If the rank is an integer, return the element at that position
  if (lowerIndex === upperIndex) {
    return sortedArr[lowerIndex];
  }

  // Otherwise, interpolate between the two bounding values
  const lowerValue = sortedArr[lowerIndex];
  const upperValue = sortedArr[upperIndex];
  const weight = rank - lowerIndex;

  return lowerValue + weight * (upperValue - lowerValue);
};

/**
 * Sample standard deviation (n - 1 denominator).
 * NOTE(review): yields NaN for fewer than two samples — presumably callers
 * always pass multiple response times; confirm before hardening.
 */
export const calculateStdev = (numbers: number[]): number => {
  const n = numbers.length;
  const mean = numbers.reduce((sum, num) => sum + num, 0) / n;
  const variance = numbers.reduce((sum, num) => sum + Math.pow(num - mean, 2), 0) / (n - 1);
  return Math.sqrt(variance);
};

/**
 * Bucket `numbers` into a 20-bin histogram.
 * @returns [counts per bucket (length 20), bucket edge labels (length 21)]
 * Fixed: a constant-valued input previously produced a zero bucket size and
 * NaN bucket indices (all counts were lost), and an empty input produced
 * Infinity edges; both degenerate cases now return well-formed output.
 */
export const createHistogramBuckets = (numbers: number[]): [number[], number[]] => {
  const buckets: number[] = Array(20).fill(0);
  if (numbers.length === 0) {
    return [buckets, Array(21).fill(0)];
  }

  const min = Math.min(...numbers);
  const max = Math.max(...numbers);
  const range = max - min;
  // Unit-width fallback when all values are identical keeps the index math
  // finite; every value then lands in bucket 0.
  const bucketSize = range > 0 ? range / 20 : 1;

  const bucketLabels: number[] = [];
  let acc = min;
  for (let i = 0; i <= 20; i++) {
    bucketLabels.push(Number(acc.toFixed(3)));
    acc += bucketSize;
  }

  for (const num of numbers) {
    // Clamp to the last bucket so num === max doesn't index past the end.
    const bucketIndex = Math.min(Math.floor((num - min) / bucketSize), 19);
    buckets[bucketIndex]++;
  }

  return [buckets, bucketLabels];
};
--------------------------------------------------------------------------------
/server/src/util/timing.ts:
--------------------------------------------------------------------------------
// Run func, await its result, and return [result, execution time in ms].
export const timingFunc = async <T>(func: () => Promise<T>): Promise<[T, number]> => {
  const a = process.hrtime.bigint();
  const res = await func();
  const b = process.hrtime.bigint();
  const execTime = Number(b - a) / 1_000_000;
  return [res, execTime];
};
--------------------------------------------------------------------------------
/server/tsconfig.json:
--------------------------------------------------------------------------------
{
  "include": ["src"],
  "compilerOptions": {
    "strict": true,
    "target": "es2022",
    "esModuleInterop": true,
    "module": "NodeNext",
    "declaration": true,
    "outDir": "./dist"
  }
}
--------------------------------------------------------------------------------
/test/src/cache.test.ts:
--------------------------------------------------------------------------------
import { expect } from 'chai';
import { Redis } from 'ioredis';
import { configureCache } from '../../server/dist/cache/cache.js';

describe('CacheMunk tests', () => {
  const redis = new Redis({ port: 6379, host: '127.0.0.1' });
  const { get, set, invalidate } = configureCache({ redis });

  before(async
() => { 10 | await redis.flushall(); 11 | }); 12 | 13 | it('cache functions should be defined', () => { 14 | expect(get).not.to.be.undefined; 15 | expect(set).not.to.be.undefined; 16 | expect(invalidate).not.to.be.undefined; 17 | }); 18 | 19 | it('should preserve data integrity', async () => { 20 | const testData = { 21 | name: 'astring', 22 | arr: ['a', 'b', 'c'], 23 | nestedArr: [1, 2, [3, 4]], 24 | }; 25 | 26 | const stringifiedObj = JSON.stringify(testData); 27 | await set('testKey1', stringifiedObj, []); 28 | const val = await get('testKey1'); 29 | expect(val).to.equal(stringifiedObj); 30 | }); 31 | 32 | it('should return null if the requested resource does not exist', async () => { 33 | const val = await get('testKey2'); 34 | expect(val).to.be.null; 35 | }); 36 | 37 | it('should invalidate dependencies', async () => { 38 | await set('key1', 'val1', ['dep1']); 39 | expect(await get('key1')).to.equal('val1'); 40 | 41 | await invalidate('dep1'); 42 | expect(await get('key1')).to.be.null; 43 | }); 44 | 45 | after(async () => { 46 | await redis.flushall(); 47 | await redis.quit(); 48 | }); 49 | }); 50 | -------------------------------------------------------------------------------- /test/src/data.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import { generateData } from '../../server/dist/benchmarks/mock/data.js'; 3 | import { generator } from '../../server/dist/benchmarks/mock/generator.js'; 4 | 5 | describe('Mock data generator tests', () => { 6 | const test = (): boolean => { 7 | const byteLen = Buffer.byteLength(JSON.stringify(generator())); 8 | 9 | const testKb = (n: number): boolean => { 10 | const data = generateData(n, generator); 11 | const totalByteLen = Buffer.byteLength(JSON.stringify(data)); 12 | if (totalByteLen - (byteLen + 1) >= n * 1024) { 13 | return false; 14 | } 15 | return true; 16 | }; 17 | 18 | return testKb(10) && testKb(20) && testKb(150) && testKb(300); 19 
| }; 20 | 21 | it('should corretly generate semi-random data payloads of various sizes', () => { 22 | expect(test()).to.equal(true); 23 | }); 24 | }); 25 | -------------------------------------------------------------------------------- /test/src/stats.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import { percentile } from '../../server/dist/util/stats.js'; 3 | 4 | describe('Percentile function tests', () => { 5 | it('should return the correct median', () => { 6 | const data = [18, 1, 3, 4, 6, 8, 9, 15]; 7 | const result = percentile(data, 50); 8 | expect(result).to.equal(7); 9 | }); 10 | 11 | it('should return the correct p100', () => { 12 | const data = [18, 1, 3, 4, 6, 8, 9, 15]; 13 | const result = percentile(data, 100); 14 | expect(result).to.equal(18); 15 | }); 16 | 17 | it('should return the correct p25', () => { 18 | const data = [18, 1, 3, 4, 6, 8, 9, 15]; 19 | const result = percentile(data, 25); 20 | expect(result).to.equal(3.75); 21 | }); 22 | 23 | it('should return the correct p0', () => { 24 | const data = [18, 1, 3, 4, 6, 8, 9, 15]; 25 | const result = percentile(data, 0); 26 | expect(result).to.equal(1); 27 | }); 28 | }); 29 | -------------------------------------------------------------------------------- /test/src/timing.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import { timingFunc } from '../../server/dist/util/timing.js'; 3 | 4 | describe('Timing function', () => { 5 | const delay = 1; 6 | const testFunc = async (): Promise => 7 | new Promise((resolve) => { 8 | setTimeout(() => { 9 | resolve(0); 10 | }, delay); 11 | }); 12 | 13 | it('should return result and execTime', async () => { 14 | const [res, execTime] = await timingFunc(testFunc); 15 | expect(res).to.equal(0); 16 | expect(execTime).to.be.greaterThan(delay); 17 | expect(execTime).to.be.lessThan(delay + 1); 18 | }); 19 | 
}); 20 | -------------------------------------------------------------------------------- /test/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "include": ["./src"], 3 | "compilerOptions": { 4 | "strict": true, 5 | "target": "es2022", 6 | "module": "NodeNext", 7 | "outDir": "./dist" 8 | } 9 | } 10 | --------------------------------------------------------------------------------