├── assets ├── full_black.png ├── Obsidian_New.png ├── banner_black.png ├── logo_whitebg.png ├── banner_gradient.png ├── Obsidian_New_nobg.png ├── bannerfull_gradient.png └── bannerfull_mutegradient.png ├── .vscode └── settings.json ├── .gitignore ├── clientMod.ts ├── mod.ts ├── docker-compose.yml ├── egg.json ├── Dockerfile ├── .github ├── pull_request_template.md └── workflows │ └── publish-to-nest.land.yml ├── tsconfig.json ├── src ├── utils.js ├── mapSelections.js ├── queryHash.js ├── Browser │ ├── wTinyLFU Sub-Caches │ │ ├── slruSub-cache.js │ │ └── lruSub-cache.js │ ├── insertTypenames.js │ ├── FrequencySketch.js │ ├── normalize.js │ ├── lruBrowserCache.js │ ├── wTinyLFUBrowserCache.js │ └── lfuBrowserCache.js ├── DoSSecurity.ts ├── restructure.ts ├── invalidateCacheCheck.ts ├── normalize.ts ├── quickCache.js └── Obsidian.ts ├── test_files ├── READ_ME_TEST.md ├── rhum_test_files │ ├── browserNormalize_test.ts │ ├── serverCache_test.js │ ├── transformResponse_test.js │ ├── lfuBrowserCache_test.ts │ ├── writeCache_test.ts │ ├── insertTypenames_test.ts │ ├── restructure_test.ts │ ├── DoSSecurity_test.ts │ ├── readCache_test.ts │ ├── serverNormalize_test.js │ ├── wTinyLFU_test.js │ └── destructure_test.ts └── test_variables │ ├── DoSSecurity_variables.ts │ ├── insertTypenames_variables.ts │ ├── garbage_collection_variables.ts │ ├── transformResponse_variables.ts │ ├── lfuBrowserCache_variables.ts │ ├── transformResponseLight.ts │ ├── readCache_variables.ts │ ├── writeCache_variables.ts │ ├── quickCacheLight.js │ ├── restructure_variables.ts │ ├── wTinyLFU_variables.js │ └── browserNormalize_variables.ts ├── LICENSE ├── ObsidianWrapper └── ObsidianWrapper.jsx └── README.md /assets/full_black.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/full_black.png -------------------------------------------------------------------------------- /.vscode/settings.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "deno.enable": true, 3 | "deno.lint": true, 4 | "deno.unstable": true 5 | } -------------------------------------------------------------------------------- /assets/Obsidian_New.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/Obsidian_New.png -------------------------------------------------------------------------------- /assets/banner_black.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/banner_black.png -------------------------------------------------------------------------------- /assets/logo_whitebg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/logo_whitebg.png -------------------------------------------------------------------------------- /assets/banner_gradient.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/banner_gradient.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dump.rdb 2 | .DS_Store 3 | .env 4 | server.tsx 5 | server2.tsx 6 | sampleServer.tsx 7 | demo 8 | server3.tsx -------------------------------------------------------------------------------- /assets/Obsidian_New_nobg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/Obsidian_New_nobg.png -------------------------------------------------------------------------------- /assets/bannerfull_gradient.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/bannerfull_gradient.png -------------------------------------------------------------------------------- /assets/bannerfull_mutegradient.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-source-labs/obsidian/HEAD/assets/bannerfull_mutegradient.png -------------------------------------------------------------------------------- /clientMod.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ObsidianWrapper, 3 | useObsidian, 4 | } from './ObsidianWrapper/ObsidianWrapper.jsx'; 5 | 6 | export { ObsidianWrapper, useObsidian }; 7 | -------------------------------------------------------------------------------- /mod.ts: -------------------------------------------------------------------------------- 1 | import { ObsidianRouter } from './src/Obsidian.ts'; 2 | import gql from 'https://deno.land/x/oak_graphql@0.6.2/graphql-tag/index.ts'; 3 | 4 | export { ObsidianRouter, gql }; 5 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | site: 5 | build: . 
6 | restart: always 7 | volumes: 8 | - ./:/usr/app 9 | ports: 10 | - 3000:3000 11 | -------------------------------------------------------------------------------- /egg.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "obsidian", 3 | "description": "GraphQL, built for Deno - a native GraphQL caching client and server module", 4 | "homepage": "https://github.com/open-source-labs/obsidian", 5 | "files": [ 6 | "./**/*.ts", 7 | "README.md" 8 | ], 9 | "entry": "./mod.ts" 10 | } 11 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM hayd/alpine-deno:1.3.0 2 | 3 | EXPOSE 3000 4 | 5 | WORKDIR /usr/app 6 | 7 | COPY . . 8 | 9 | CMD [ "run", "--unstable", "--allow-net", "--allow-env", "--allow-read", "testServer.ts", "-c", "tsconfig.json" ] 10 | 11 | # deno run --unstable --allow-net --allow-env --allow-read testServer.ts -c tsconfig.json 12 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Checklist 2 | 3 | - [ ] Bugfix 4 | - [ ] New feature 5 | - [ ] Refactor 6 | 7 | # Related Issue 8 | 9 | - the problem you are solving goes here. 10 | 11 | # Solution 12 | 13 | - solution to the problem goes here here. Why did you solve this problem the way you did? 
14 | 15 | # Additional Info 16 | 17 | - Any additional information or context 18 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compileOnSave": true, 3 | "compilerOptions": { 4 | "allowJs": true, 5 | "target": "ES2021", 6 | "jsx": "react", 7 | "noImplicitAny": false, 8 | "module": "CommonJS", 9 | "strict": true, 10 | "lib": ["dom"] 11 | }, 12 | "include": ["./**/*"], 13 | "exclude": [ 14 | "./plugins/**/*", 15 | "./typings/**/*", 16 | "./built/**/*" // This is what fixed it! 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | export function deepEqual(object1, object2) { 2 | const keys1 = Object.keys(object1); 3 | const keys2 = Object.keys(object2); 4 | if (keys1.length !== keys2.length) { 5 | return false; 6 | } 7 | for (const key of keys1) { 8 | const val1 = object1[key]; 9 | const val2 = object2[key]; 10 | const areObjects = isObject(val1) && isObject(val2); 11 | if ( 12 | areObjects && !deepEqual(val1, val2) || 13 | !areObjects && val1 !== val2 14 | ) { 15 | return false; 16 | } 17 | } 18 | return true; 19 | } 20 | 21 | function isObject(object) { 22 | return object != null && typeof object === 'object'; 23 | } -------------------------------------------------------------------------------- /test_files/READ_ME_TEST.md: -------------------------------------------------------------------------------- 1 | Please see below for details on how to run Obsidian's test suite and additional testing resources: 2 | 3 | How to Run Obsidian Tests: 4 | To run all tests call: 5 | deno test --allow-env 6 | This can be called from the root obsidian directory and it will locate and call all test files 7 | 8 | To run a specific test file call: 9 | deno test --allow-env path/test_file.ts 10 | 
deno test --allow-all test_files 11 | Example: deno test --allow-env test_files/rhum_test_files/restructure_test.ts 12 | 13 | Additional Deno Testing Resources: 14 | 15 | 1. Deno Testing Docs: https://deno.land/manual/testing 16 | 2. Rhum Testing Docs: https://deno.land/x/rhum@v1.1.4 17 | -------------------------------------------------------------------------------- /.github/workflows/publish-to-nest.land.yml: -------------------------------------------------------------------------------- 1 | name: "publish current release to https://nest.land" 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | jobs: 9 | publishToNestDotLand: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Setup repo 14 | uses: actions/checkout@v2 15 | 16 | - name: "setup" # check: https://github.com/actions/virtual-environments/issues/1777 17 | uses: denolib/setup-deno@v2 18 | with: 19 | deno-version: v1.4.6 20 | 21 | - name: "check nest.land" 22 | run: | 23 | deno run --allow-net --allow-read --allow-run https://deno.land/x/cicd/publish-on-nest.land.ts ${{ secrets.GITHUB_TOKEN }} ${{ secrets.NESTAPIKEY }} ${{ github.repository }} 24 | -------------------------------------------------------------------------------- /test_files/rhum_test_files/browserNormalize_test.ts: -------------------------------------------------------------------------------- 1 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 2 | import normalizeResult from '../../src/Browser/normalize.js'; 3 | import { test } from '../test_variables/browserNormalize_variables.ts'; 4 | 5 | Rhum.testPlan('normalize.ts', () => { 6 | Rhum.testSuite('normalizeTestSuite', () => { 7 | Rhum.testCase( 8 | 'expected result to equal object with ROOT_QUERY and hash:value pairs', 9 | async () => { 10 | const result = normalizeResult(test.queryObject1, test.resultObject1); 11 | Rhum.asserts.assertEquals(result, test.resultObj1); 12 | } 13 | ); 14 | }); 15 | Rhum.testSuite('normalizeAliasTestSuite', () => { 16 | 
Rhum.testCase( 17 | 'expected result to equal object with ROOT_QUERY and hash:value pairs', 18 | async () => { 19 | const result = normalizeResult(test.aliasTestQueryObj, test.aliasTestResult); 20 | Rhum.asserts.assertEquals(result, test.aliasTestRootHash); 21 | } 22 | ); 23 | }); 24 | }); 25 | 26 | Rhum.run(); 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 OSLabs Beta 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/mapSelections.js: -------------------------------------------------------------------------------- 1 | /** @format */ 2 | 3 | import { gql } from 'https://deno.land/x/oak_graphql/mod.ts'; 4 | 5 | export function mapSelectionSet(query) { 6 | // Gets fields from query and stores all in an array - used to selectively query cache 7 | const selectionKeysMap = {}; 8 | const ast = gql(query); 9 | const selections = ast.definitions[0].selectionSet.selections; 10 | const tableName = selections[0].name.value; 11 | 12 | const recursiveMap = (recurseSelections) => { 13 | for (const selection of recurseSelections) { 14 | if (selection.name && selection.name.value) { 15 | selectionKeysMap[selection.name.value] = selection.name.value; 16 | } 17 | if (selection.alias && selection.alias.value) { 18 | selectionKeysMap[selection.alias.value] = selection.name.value; 19 | } 20 | 21 | if (selection.selectionSet && selection.selectionSet.selections) { 22 | recursiveMap(selection.selectionSet.selections); 23 | } 24 | } 25 | }; 26 | recursiveMap(selections); 27 | 28 | // filter out table name from array, leaving only fields 29 | const selectedFields = Object.keys(selectionKeysMap).filter( 30 | (key) => key !== tableName 31 | ); 32 | return selectedFields; 33 | } 34 | -------------------------------------------------------------------------------- /test_files/rhum_test_files/serverCache_test.js: -------------------------------------------------------------------------------- 1 | /** 2 | * NOTES: 3 | * This file will test the read and write method on the Cache class functionality. 
4 | */ 5 | import { Cache } from '../test_variables/quickCacheLight.js' 6 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 7 | import { test as testWrite } from '../test_variables/writeCache_variables.ts'; 8 | import { test as testRead } from '../test_variables/readCache_variables.ts'; 9 | 10 | //====================================================================== 11 | 12 | Rhum.testPlan('Write to Cache class', () => { 13 | Rhum.testSuite('write()', () => { 14 | Rhum.testCase('Should write to redis cache', async () => { 15 | const cache = new Cache(testWrite.originalCache); 16 | await cache.write(testWrite.queryStr, testWrite.respObj); 17 | Rhum.asserts.assertEquals(cache.storage, testWrite.originalCache); 18 | }); 19 | Rhum.testCase( 20 | 'should not overwrite the fields in the original cache with the new fields if the fields are not the same', 21 | async () => { 22 | const cache = new Cache(testWrite.originalCache); 23 | await cache.write(testWrite.queryStrTwo, testWrite.respObj); 24 | Rhum.asserts.assertEquals(testWrite.originalCache, cache.storage); 25 | } 26 | ); 27 | }); 28 | 29 | Rhum.testSuite('read()', () => { 30 | Rhum.testCase( 31 | '\n *** \n serverCache_test \nshould return a graphql response object if all required values are found in the cache', 32 | async () => { 33 | const cache = new Cache(testRead.cache); 34 | cache.write(testRead.singularInputQuery, testRead.singularQueryResObj) 35 | const result = await cache.read(testRead.singularInputQuery); 36 | Rhum.asserts.assertEquals(result, testRead.singularQueryResObj); 37 | } 38 | ); 39 | }); 40 | }); 41 | 42 | Rhum.run(); 43 | -------------------------------------------------------------------------------- /test_files/test_variables/DoSSecurity_variables.ts: -------------------------------------------------------------------------------- 1 | export const test = { 2 | DEPTH_2_QUERY: ` 3 | query AllActionMovies { 4 | movies(input: { genre: ACTION }) { 5 | __typename 6 | id 7 | title 8 
| genre 9 | actors { 10 | id 11 | firstName 12 | lastName 13 | } 14 | } 15 | }`, 16 | 17 | DEPTH_2_MUTATION: ` 18 | mutation AllActionMoviesAndAllActors { 19 | movies(input: { genre: ACTION }) { 20 | id 21 | title 22 | genre 23 | actors { 24 | id 25 | firstName 26 | lastName 27 | } 28 | } 29 | }`, 30 | 31 | MULTIPLE_DEPTH_2_QUERY: ` 32 | query AllActionMovies { 33 | movies(input: { genre: ACTION }) { 34 | __typename 35 | id 36 | title 37 | genre 38 | }, 39 | movies(input: { genre: ACTION }) { 40 | __typename 41 | id 42 | title 43 | genre 44 | actors { 45 | id 46 | firstName 47 | lastName 48 | } 49 | }, 50 | movies(input: { genre: ACTION }) { 51 | __typename 52 | id 53 | title 54 | genre 55 | actors { 56 | id 57 | firstName 58 | lastName 59 | } 60 | } 61 | }`, 62 | 63 | MULTIPLE_DEPTH_2_MUTATION: ` 64 | mutation AllActionMovies { 65 | movies(input: { genre: ACTION }) { 66 | __typename 67 | id 68 | title 69 | genre 70 | }, 71 | movies(input: { genre: ACTION }) { 72 | __typename 73 | id 74 | title 75 | genre 76 | actors { 77 | id 78 | firstName 79 | lastName 80 | } 81 | }, 82 | movies(input: { genre: ACTION }) { 83 | __typename 84 | id 85 | title 86 | genre 87 | actors { 88 | id 89 | firstName 90 | lastName 91 | } 92 | } 93 | }`, 94 | }; -------------------------------------------------------------------------------- /test_files/rhum_test_files/transformResponse_test.js: -------------------------------------------------------------------------------- 1 | import { assert, equal, assertStrictEquals, assertEquals } from "https://deno.land/std/testing/asserts.ts"; 2 | // import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 3 | import { transformResponse, detransformResponse } from '../test_variables/transformResponseLight.ts'; 4 | import { test } from '../test_variables/transformResponse_variables.ts'; 5 | import { Cache } from '../test_variables/quickCacheLight.js' 6 | 7 | 8 | 9 | // transformResponse 10 | Deno.test('transformResponse - expected transformation 
to work on nested response objects', () => { 11 | const result = transformResponse(test.detransformedResponse_nested, test.hashableKeys); 12 | assertEquals(JSON.stringify(result), JSON.stringify(test.transformedResponse_nested)) 13 | }) 14 | Deno.test('transformResponse - expected transformation to work on non nested response objects', () => { 15 | const result = transformResponse(test.detransformedResponse_notnested, test.hashableKeys); 16 | assertEquals(JSON.stringify(result), JSON.stringify(test.transformedResponse_notnested)) 17 | }) 18 | 19 | // detransformResponse 20 | // test cases below do not seem to work when working with Redis - not sure why 21 | // Deno.test('detransformResponse - expected detransformation to work on nested response objects', () => { 22 | // const cache = new Cache(); 23 | // for (let i = 0; i < test.writeHashes.length; i++) { 24 | // cache.cacheWriteObject(test.writeHashes[i], test.writeData[i]); 25 | // } 26 | // const result = detransformResponse(test.queryKey, test.transformedResponse_nested); 27 | // assertEquals(JSON.stringify(result), JSON.stringify(test.detransformedResponse_nested)) 28 | // }) 29 | // Deno.test('detransformResponse - expected detransformation to work on non nested response objects', () => { 30 | // const result = detransformResponse(test.queryKey, test.transformedResponse_notnested); 31 | // assertEquals(JSON.stringify(result), JSON.stringify(test.detransformedResponse_notnested)) 32 | // }) -------------------------------------------------------------------------------- /src/queryHash.js: -------------------------------------------------------------------------------- 1 | // Create hash table 2 | class Node { 3 | constructor(key, str) { 4 | this.value = {key, str}; 5 | this.next = null; 6 | } 7 | } 8 | 9 | class LinkedList { 10 | constructor() { 11 | this.head = null; 12 | this.tail = null; 13 | } 14 | 15 | // adds a node to the end of the linked list 16 | addNode(key, str) { 17 | if (this.head === null) { 18 
| this.head = new Node(key, str); 19 | this.tail = this.head; 20 | } else { 21 | this.tail.next = new Node(key, str); 22 | this.tail = this.tail.next 23 | } 24 | } 25 | 26 | // finds a node from the SHA256-hashed queryStr and returns the queryStr 27 | getNode(key) { 28 | if (this.head === null) return undefined; 29 | let currNode = this.head; 30 | while (currNode) { 31 | if (currNode.value.key === key) return currNode.value.str; 32 | else currNode = currNode.next; 33 | } 34 | return undefined; 35 | } 36 | } 37 | 38 | export class HashTable { 39 | constructor(size) { 40 | this.SIZE = size; 41 | this.table = new Array(this.SIZE); 42 | } 43 | 44 | // adds a value to the hashTable 45 | add(sha256Str, str) { 46 | const index = hashSlingingSlasher(sha256Str, this.SIZE); 47 | // if there is nothing at that index of the hash table 48 | if (!this.table[index]) { 49 | // initialize a new linked list and add a node to it 50 | this.table[index] = new LinkedList(); 51 | this.table[index].addNode(sha256Str, str); 52 | // if there is already a linked list at that index 53 | } else { 54 | // add a new node 55 | this.table[index].addNode(sha256Str, str); 56 | } 57 | } 58 | 59 | // gets the queryStr given the SHA256-Hashed queryStr 60 | get(key) { 61 | 62 | const index = hashSlingingSlasher(key, this.SIZE); 63 | if (!this.table[index]) return undefined; 64 | return this.table[index].getNode(key); 65 | } 66 | 67 | } 68 | 69 | // hashing function 70 | function hashSlingingSlasher(string, size) { 71 | let hash = 0; 72 | if (string.length === 0) return hash; 73 | for (let i = 0; i < string.length; i++) { 74 | const letter = string.charCodeAt(i); 75 | hash = ((hash << 5) - hash) + letter; 76 | hash = hash & hash; // Convert to 32bit integer 77 | } 78 | return Math.abs(hash) % size; 79 | } -------------------------------------------------------------------------------- /test_files/test_variables/insertTypenames_variables.ts: 
-------------------------------------------------------------------------------- 1 | export const test = { 2 | singleQueryInput: ` 3 | query AllActionMoviesAndAllActors { 4 | movies(input: { genre: ACTION }) { 5 | __typename 6 | id 7 | title 8 | genre 9 | actors { 10 | id 11 | firstName 12 | lastName 13 | } 14 | } 15 | } 16 | } 17 | `, 18 | 19 | singleQueryOutput: 20 | 'query AllActionMoviesAndAllActors { movies(input: { genre: ACTION }) { __typename id title genre actors { __typename id firstName lastName } } } }', 21 | 22 | singleMutationInput: ` 23 | mutation AllActionMoviesAndAllActors { 24 | movies(input: { genre: ACTION }) { 25 | id 26 | title 27 | genre 28 | actors { 29 | id 30 | firstName 31 | lastName 32 | } 33 | } 34 | } 35 | } 36 | `, 37 | 38 | singleMutationOutput: 39 | 'mutation AllActionMoviesAndAllActors { movies(input: { genre: ACTION }) { __typename id title genre actors { __typename id firstName lastName } } } }', 40 | 41 | multipleQueriesInput: ` 42 | query AllActionMoviesAndAllActors { 43 | movies(input: { genre: ACTION }) { 44 | __typename 45 | id 46 | title 47 | genre 48 | actors { 49 | id 50 | firstName 51 | lastName 52 | } 53 | } 54 | actors { 55 | id 56 | firstName 57 | lastName 58 | films { 59 | __typename 60 | id 61 | title 62 | } 63 | } 64 | } 65 | } 66 | `, 67 | multipleQueriesOutput: 68 | 'query AllActionMoviesAndAllActors { movies(input: { genre: ACTION }) { __typename id title genre actors { __typename id firstName lastName } } actors { __typename id firstName lastName films { __typename id title } } } }', 69 | 70 | fieldsStrInput: 71 | '{ __typename id title genre actors { id firstName lastName } }', 72 | fieldsStrOutput: 73 | '{ __typename id title genre actors { __typename id firstName lastName } }', 74 | 75 | newAliasTestQuery: ` 76 | query twoHeros { 77 | empireHero: hero(episode: EMPIRE) { 78 | name 79 | } 80 | jediHero: hero(episode: JEDI) { 81 | name 82 | } 83 | }`, 84 | 85 | newAliasTestResult: `query twoHeros { empireHero: 
hero(episode: EMPIRE) { __typename name } jediHero: hero(episode: JEDI) { __typename name } }`, 86 | }; 87 | -------------------------------------------------------------------------------- /test_files/rhum_test_files/lfuBrowserCache_test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * NOTES: 3 | * 1.This file will test the write method on the Cache class functionality: 4 | * Should return the new updated cache when the cache was successfully updated with the same reference to the original cache 5 | // Should return the string 'Cache update' when the cache was successfully updated. 6 | * Should update the original cache with the new fields and queries. 7 | * Should not overwrite the fields in the original cache with the new fields if the fields are not the same 8 | * Should test capacity 9 | */ 10 | 11 | import LFUCache from '../../src/Browser/lfuBrowserCache.js'; 12 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 13 | import { test } from '../test_variables/lfuBrowserCache_variables.ts'; 14 | import normalizeResult from '../../src/Browser/normalize.js'; 15 | 16 | Rhum.testPlan('LFU Browser Cache Testing', () => { 17 | Rhum.testSuite('write/read nested data object', () => { 18 | Rhum.testCase( 19 | 'should store a nested data object into LFUCache and read the stored un-nested data objects by calling their hashes', 20 | async () => { 21 | const cache = new LFUCache(10); 22 | await cache.write(test.nestedObj.queryStr, test.nestedObj.respObj); 23 | for (let key of Object.keys(test.nestedObj.expectedCache)) { 24 | await Rhum.asserts.assertEquals( 25 | cache.get(key), 26 | test.nestedObj.expectedCache[key] 27 | ); 28 | } 29 | } 30 | ); 31 | }); 32 | Rhum.testSuite('LFU cache evict the proper items', () => { 33 | Rhum.testCase( 34 | 'should remove the least frequently used item from cache', 35 | async () => { 36 | const cache = new LFUCache(5); 37 | await cache.write(test.LFUObj.queryStr1, 
test.LFUObj.respObj1); 38 | await Rhum.asserts.assertEquals( 39 | cache.get('Actor~1'), 40 | test.LFUObj.expectedCache1['Actor~1'] 41 | ); 42 | await cache.get('Actor~2'); 43 | await cache.get('Actor~3'); 44 | await cache.get('Actor~4'); 45 | await cache.get('Actor~5'); 46 | await cache.write(test.LFUObj.queryStr2, test.LFUObj.respObj2); 47 | Rhum.asserts.assertEquals(cache.get('Actor~1'), undefined); 48 | } 49 | ); 50 | }); 51 | }); 52 | 53 | Rhum.run(); 54 | // TO RUN TEST: deno test test_files/rhum_test_files/lfuBrowserCache_test.ts --allow-env 55 | -------------------------------------------------------------------------------- /src/Browser/wTinyLFU Sub-Caches/slruSub-cache.js: -------------------------------------------------------------------------------- 1 | import LRUCache from './lruSub-cache.js'; 2 | 3 | /***** 4 | * Main SLRU Cache 5 | *****/ 6 | export default function SLRUCache(capacity) { 7 | // Probationary LRU Cache using existing LRU structure in lruBrowserCache.js 8 | this.probationaryLRU = new LRUCache(capacity * .20); 9 | // Protected LRU Cache 10 | this.protectedLRU = new LRUCache(capacity * .80); 11 | } 12 | 13 | // Get item from cache, updates last access, 14 | // and promotes existing items to protected 15 | SLRUCache.prototype.get = function (key) { 16 | // get the item from the protectedLRU 17 | const protectedItem = this.protectedLRU.get(key); 18 | // check to see if the item is in the probationaryLRU 19 | const probationaryItem = this.probationaryLRU.peek(key); 20 | 21 | // If the item is in neither segment, return undefined 22 | if (protectedItem === null && probationaryItem === null) return; 23 | 24 | // If the item only exists in the protected segment, return that item 25 | if (protectedItem !== null) return protectedItem; 26 | 27 | // If the item only exists in the probationary segment, promote to protected and return item 28 | // if adding an item to the protectedLRU results in ejection, demote ejected node 29 | 
this.probationaryLRU.delete(key); 30 | this.putAndDemote(key, probationaryItem); 31 | return probationaryItem; 32 | } 33 | 34 | // add or update item in cache 35 | SLRUCache.prototype.put = function (key, node) { 36 | // if the item is in the protected segment, update it 37 | if (this.protectedLRU.nodeHash.get(key)) this.putAndDemote(key, node); 38 | else if (this.probationaryLRU.nodeHash(key)) { 39 | // if the item is in the probationary segment, 40 | // promote and update it 41 | this.probationaryLRU.delete(key); 42 | this.putAndDemote(key, node); 43 | } 44 | // if in neither, add item to the probationary segment 45 | else this.probationaryLRU.put(key, node) 46 | } 47 | 48 | // Check to see if the item exists in the cache without updating access 49 | SLRUCache.prototype.has = function (key) { 50 | return this.protectedLRU.nodeHash.get(key) || this.probationaryLRU.nodeHash.get(key); 51 | } 52 | 53 | // Adds a node to the protectedLRU 54 | SLRUCache.prototype.putAndDemote = function (key, value) { 55 | // if adding an item to the protectedLRU results in ejection, demote ejected node 56 | const demoted = this.protectedLRU.put(key, value); 57 | if (demoted) this.probationaryLRU.put(demoted.key, demoted.value); 58 | } -------------------------------------------------------------------------------- /test_files/rhum_test_files/writeCache_test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * NOTES: 3 | * 1.This file will test the write method on the Cache class functionality: 4 | * Should return the new updated cache when the cache was successfully updated with the same reference to the original cache 5 | // Should return the string 'Cache update' when the cache was successfully updated. 6 | * Should update the original cache with the new fields and queries. 
7 | * Should not overwrite the fields in the original cache with the new fields if the fields are not the same 8 | * 9 | */ 10 | 11 | import Cache from '../../src/Browser/CacheClassBrowser.js'; 12 | import { Cache as CacheServer } from '../../src/quickCache.js'; 13 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 14 | import { test } from '../test_variables/writeCache_variables.ts'; 15 | 16 | Rhum.testPlan('write method on Cache class', () => { 17 | Rhum.testSuite('write', () => { 18 | Rhum.testCase( 19 | 'should return the new updated cache when the cache was successfully updated with the same reference to the original cache', 20 | () => { 21 | const cache = new Cache(test.originalCache); 22 | cache.write(test.queryStr, test.respObj); 23 | Rhum.asserts.assertEquals(cache.storage, test.originalCache); 24 | } 25 | ); 26 | Rhum.testCase( 27 | 'should update the original cache with the new fields and queries', 28 | () => { 29 | const cache = new Cache(test.originalCache); 30 | cache.write(test.queryStr, test.respObj); 31 | Rhum.asserts.assertEquals(cache.storage, test.expectedResultCache); 32 | } 33 | ); 34 | Rhum.testCase( 35 | 'should not overwrite the fields in the original cache with the new fields if the fields are not the same', 36 | () => { 37 | const cache = new Cache(test.originalCache); 38 | cache.write(test.queryStrTwo, test.respObj); 39 | Rhum.asserts.assertEquals(test.originalCache, cache.storage); 40 | } 41 | ); 42 | // The following test requires the redis server to be started to test functionality. 
43 | // 44 | // Rhum.testCase( 45 | // 'alias test case', 46 | // async () => { 47 | // const cache = new CacheServer(test.originalCache); 48 | // await cache.write(test.aliasQuery, test.aliasResponse); 49 | // await console.log(cache.storage); 50 | // Rhum.asserts.assertEquals(cache.storage, test.originalCache); 51 | // } 52 | // ); 53 | }); 54 | }); 55 | Rhum.run(); 56 | // TO RUN TEST: deno test test_files/rhum_test_files/writeCache_test.ts --allow-env 57 | -------------------------------------------------------------------------------- /test_files/rhum_test_files/insertTypenames_test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | insertTypenames, 3 | addTypenamesToFieldsStr, 4 | findClosingBrace, 5 | } from '../../src/Browser/insertTypenames.js'; 6 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 7 | import { test } from '../test_variables/insertTypenames_variables.ts'; 8 | 9 | Rhum.testPlan('insertTypenames.js', () => { 10 | Rhum.testSuite('insertTypenames()', () => { 11 | Rhum.testCase( 12 | 'should add __typenames meta field to every level of a graphql query', 13 | () => { 14 | const result = insertTypenames(test.singleQueryInput); 15 | Rhum.asserts.assertEquals(result, test.singleQueryOutput); 16 | } 17 | ); 18 | Rhum.testCase( 19 | 'should add __typenames meta field to every level of a graphql mutation', 20 | () => { 21 | const result = insertTypenames(test.singleMutationInput); 22 | Rhum.asserts.assertEquals(result, test.singleMutationOutput); 23 | } 24 | ); 25 | Rhum.testCase( 26 | 'should add __typenames meta field to every level of a graphql operation with multiple queries', 27 | () => { 28 | const result = insertTypenames(test.multipleQueriesInput); 29 | Rhum.asserts.assertEquals(result, test.multipleQueriesOutput); 30 | } 31 | ); 32 | }); 33 | 34 | Rhum.testSuite('addTypenamesToFieldsStr()', () => { 35 | Rhum.testCase( 36 | 'should add __typenames meta field to every level of a 
field string', 37 | () => { 38 | const result = addTypenamesToFieldsStr(test.fieldsStrInput); 39 | Rhum.asserts.assertEquals(result, test.fieldsStrOutput); 40 | } 41 | ); 42 | }); 43 | Rhum.testSuite('findClosingBrace()', () => { 44 | Rhum.testCase( 45 | 'should return the index of the matching closing brace', 46 | () => { 47 | const result = findClosingBrace('asdf{asasd}a', 4); 48 | Rhum.asserts.assertEquals(result, 10); 49 | } 50 | ); 51 | Rhum.testCase( 52 | 'should return the index of the matching closing brace when there are other nested brace', 53 | () => { 54 | const result = findClosingBrace('asdf{as{{a}}sd}a', 4); 55 | Rhum.asserts.assertEquals(result, 14); 56 | } 57 | ); 58 | }); 59 | 60 | Rhum.testSuite('insertTypenames()', () => { 61 | Rhum.testCase( 62 | 'should add __typenames meta field to graphql alias query', 63 | () => { 64 | const result = insertTypenames(test.newAliasTestQuery); 65 | Rhum.asserts.assertEquals(result, test.newAliasTestResult); 66 | } 67 | ); 68 | }); 69 | }); 70 | Rhum.run(); 71 | -------------------------------------------------------------------------------- /test_files/rhum_test_files/restructure_test.ts: -------------------------------------------------------------------------------- 1 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 2 | //import { assert } from "https://deno.land/std@0.102.0/testing/asserts.ts"; 3 | import { restructure } from '../../src/restructure.ts'; 4 | import { test } from '../test_variables/restructure_variables.ts'; 5 | import { gql } from "https://deno.land/x/oak_graphql/mod.ts"; 6 | //import {concatInlineFragments, parseFragmentToInlineFragment} from "https://deno.land/x/oak_graphql/graphql-tools/utils/fragments.ts"; 7 | import { print, visit } from "https://deno.land/x/graphql_deno/mod.ts"; 8 | 9 | // Testing Fragments with two Seperate queries 10 | 11 | Rhum.testPlan("restructure.ts", () => { 12 | Rhum.testSuite("restructure fragment test", () => { 13 | // // No Fragment test 14 
| Rhum.testCase("restructure fragment test - no fragments", () => { 15 | const result = restructure(test.fragmentTestData0); 16 | Rhum.asserts.assertEquals(result, print(gql(test.fragmentResultData0))); 17 | }); 18 | // Fragment with two seperate queries 19 | Rhum.testCase("restructure fragment test - results in two seperate queries", () => { 20 | const result = restructure(test.fragmentTestData); 21 | Rhum.asserts.assertEquals(result, print(gql(test.fragmentResultData))); 22 | }); 23 | // Fragments in One query 24 | Rhum.testCase('restructure fragment test - result in one query', () => { 25 | const result = restructure(test.fragmentTestData2); 26 | Rhum.asserts.assertEquals(result, print(gql(test.fragmentResultData2))); 27 | }); 28 | // Fragment with Nested Fragments 29 | Rhum.testCase('restructure fragment test - nested fragments', () => { 30 | const result = restructure(test.fragmentTestData3); 31 | Rhum.asserts.assertEquals(result, print(gql(test.fragmentResultData3))) 32 | }); 33 | // Single Variable Test 34 | Rhum.testSuite('restructure single variable query tests', () => { 35 | Rhum.testCase('restructure single variable query string', () => { 36 | const result = restructure( 37 | test.singleVariableTestData, 38 | ); 39 | Rhum.asserts.assertEquals(result, print(gql(test.singleVariableTestResult))); 40 | }) 41 | }); 42 | // // Multi Variable Test 43 | Rhum.testSuite('restructure multi variable query test', () => { 44 | Rhum.testCase('restructure multi variable query', () => { 45 | const result = restructure( 46 | test.multiVariableTestData, 47 | ); 48 | Rhum.asserts.assertEquals(result, print(gql(test.multiVariableTestResult))); 49 | }); 50 | }) 51 | }); 52 | }); 53 | 54 | 55 | Rhum.run(); // <-- make sure to include this so that your tests run via `deno test` -------------------------------------------------------------------------------- /src/DoSSecurity.ts: -------------------------------------------------------------------------------- 1 | import 
destructureQueries from './Browser/destructure.js'; 2 | 3 | // Interface representing shape of query object after destructuring 4 | interface queryObj { 5 | queries?: Array, 6 | mutations?: Array, 7 | } 8 | 9 | /** 10 | * Tests whether a queryString (string representation of query) exceeds the maximum nested depth levels (queryDepthLimit) allowable for the instance of obsidian 11 | * @param {*} queryString the string representation of the graphql query 12 | * @param {*} queryDepthLimit number representation of the maximum query depth limit. Default 0 will return undefined. Root query doesn't count toward limit. 13 | * @returns void; throws a Security Error if any query or mutation exceeds the maximum allowed depth 14 | */ 15 | export default function queryDepthLimiter(queryString: string, queryDepthLimit: number = 0): void { 16 | const queryObj = destructureQueries(queryString) as queryObj; 17 | /** 18 | * Function that tests whether the query object depth exceeds maximum depth 19 | * @param {*} qryObj an object representation of the query (after destructure) 20 | * @param {*} qryDepthLim the maximum query depth 21 | * @param {*} depth indicates current depth level 22 | * @returns boolean indicating whether query depth exceeds maximum depth 23 | */ 24 | const queryDepthCheck = (qryObj: queryObj, qryDepthLim: number, depth: number = 0): boolean => { 25 | // Base case 1: check to see if depth exceeds limit, if so, return error (true means depth limit was exceeded) 26 | if (depth > qryDepthLim) return true; 27 | // Recursive case: Iterate through values of queryObj, and check if each value is an object, 28 | for (let value = 0; value < Object.values(qryObj).length; value++) { 29 | // if the value is an object, recurse into it with depth + 1 and report true as soon as ANY nested branch exceeds the limit. Bug fix: the previous code did `return queryDepthCheck(...)` on the FIRST object value, so sibling fields after a shallow first branch were never depth-checked (an exploitable false negative). 30 | const currentValue = Object.values(qryObj)[value]; 31 | if (typeof currentValue === 'object') { 32 | if (queryDepthCheck(currentValue, qryDepthLim, depth + 1)) return true; 33 | }; 34 | }; 35 | // Base case
2: reach end of object keys iteration, return false - depth has not been exceeded 36 | return false; 37 | }; 38 | 39 | // Check if queryObj has query or mutation root type, if so, call queryDepthCheck on each element, i.e. each query or mutation 40 | if (queryObj.queries) { 41 | for(let i = 0; i < queryObj.queries.length; i++) { 42 | if(queryDepthCheck(queryObj.queries[i], queryDepthLimit)) { 43 | throw new Error( 44 | 'Security Error: Query depth exceeded maximum query depth limit' 45 | ); 46 | }; 47 | }; 48 | }; 49 | 50 | if (queryObj.mutations){ 51 | for (let i = 0; i < queryObj.mutations.length; i++) { 52 | if (queryDepthCheck(queryObj.mutations[i], queryDepthLimit)) { 53 | throw new Error( 54 | 'Security Error: Query depth exceeded maximum mutation depth limit' 55 | ); 56 | }; 57 | }; 58 | }; 59 | } 60 | -------------------------------------------------------------------------------- /test_files/rhum_test_files/DoSSecurity_test.ts: -------------------------------------------------------------------------------- 1 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 2 | import queryDepthLimiter from '../../src/DoSSecurity.ts'; 3 | import { test } from '../test_variables/DoSSecurity_variables.ts'; 4 | 5 | 6 | Rhum.testPlan('DoSSecurity.ts', () => { 7 | Rhum.testSuite('Query depth limit NOT exceeded tests', () => { 8 | Rhum.testCase('Test query depth of 2 does not exceed allowable depth 2', () => { 9 | const results = queryDepthLimiter(test.DEPTH_2_QUERY, 2); 10 | Rhum.asserts.assertEquals(undefined, results); 11 | }); 12 | Rhum.testCase('Test mutation depth of 2 does not exceed allowable depth of 2', () => { 13 | const results = queryDepthLimiter(test.DEPTH_2_MUTATION, 2); 14 | Rhum.asserts.assertEquals(undefined, results); 15 | }); 16 | }); 17 | 18 | Rhum.testSuite('Query/mutation depth limit IS EXCEEDED tests', () => { 19 | Rhum.testCase('Test query depth 2 should exceed depth limit of 1', () => { 20 | Rhum.asserts.assertThrows( 21 | () => { 22 | 
queryDepthLimiter(test.DEPTH_2_QUERY, 1) 23 | }, 24 | Error, 25 | "Security Error: Query depth exceeded maximum query depth limit", 26 | ) 27 | }); 28 | Rhum.testCase('Test mutation depth 2 should exceed depth limit of 1', () => { 29 | Rhum.asserts.assertThrows( 30 | () => { 31 | queryDepthLimiter(test.DEPTH_2_MUTATION, 1) 32 | }, 33 | Error, 34 | "Security Error: Query depth exceeded maximum mutation depth limit", 35 | ) 36 | }); 37 | }); 38 | 39 | Rhum.testSuite('Query depth limit NOT exceeded, multiple query tests', () => { 40 | Rhum.testCase('Test multiple queries of depth 2 should not exceed allowable depth 2', () => { 41 | const results = queryDepthLimiter(test.MULTIPLE_DEPTH_2_QUERY, 2); 42 | Rhum.asserts.assertEquals(undefined, results); 43 | }); 44 | Rhum.testCase('Test multiple mutations of depth 2 should not exceed allowable depth 2', () => { 45 | const results = queryDepthLimiter(test.MULTIPLE_DEPTH_2_MUTATION, 2); 46 | Rhum.asserts.assertEquals(undefined, results); 47 | }); 48 | }); 49 | 50 | Rhum.testSuite('Multiple query/mutation depth limit IS EXCEEDED tests', () => { 51 | Rhum.testCase('Test multiple query depth should be exceeded', () => { 52 | Rhum.asserts.assertThrows( 53 | () => { 54 | queryDepthLimiter(test.MULTIPLE_DEPTH_2_QUERY, 1) 55 | }, 56 | Error, 57 | "Security Error: Query depth exceeded maximum query depth limit", 58 | ) 59 | }); 60 | Rhum.testCase('Test multiple mutation depth should be exceeded', () => { 61 | Rhum.asserts.assertThrows( 62 | () => { 63 | queryDepthLimiter(test.MULTIPLE_DEPTH_2_MUTATION, 1) 64 | }, 65 | Error, 66 | "Security Error: Query depth exceeded maximum mutation depth limit", 67 | ) 68 | }); 69 | }); 70 | }); 71 | 72 | Rhum.run(); -------------------------------------------------------------------------------- /src/Browser/wTinyLFU Sub-Caches/lruSub-cache.js: -------------------------------------------------------------------------------- 1 | import { plural } from 
"https://deno.land/x/deno_plural@2.0.0/mod.ts"; 2 | 3 | class Node { 4 | constructor (key, value) { 5 | this.key = key; 6 | this.value = value; 7 | this.next = this.prev = null; 8 | } 9 | } 10 | 11 | export default function LRUCache(capacity) { 12 | this.capacity = capacity; 13 | this.currentSize = 0; 14 | // node hash for cache lookup and storage 15 | this.nodeHash = new Map(); 16 | 17 | // doubly-linked list to keep track of recency and handle eviction 18 | this.head = new Node('head', null); 19 | this.tail = new Node('tail', null); 20 | this.head.next = this.tail; 21 | this.tail.prev = this.head; 22 | } 23 | 24 | LRUCache.prototype.removeNode = function (node) { 25 | const prev = node.prev; 26 | const next = node.next; 27 | prev.next = next; 28 | next.prev = prev; 29 | }; 30 | 31 | 32 | LRUCache.prototype.addNode = function (node) { 33 | const tempTail = this.tail.prev; 34 | tempTail.next = node; 35 | 36 | this.tail.prev = node; 37 | node.next = this.tail; 38 | node.prev = tempTail; 39 | } 40 | 41 | // Like get, but doesn't update anything 42 | LRUCache.prototype.peek = function(key) { 43 | const node = this.nodeHash.get(key); 44 | if (!node) return null; 45 | return node.value; 46 | } 47 | 48 | // Like removeNode, but takes key and deletes from hash 49 | LRUCache.prototype.delete = function (key) { 50 | const node = this.nodeHash.get(key); 51 | const prev = node.prev; 52 | const next = node.next; 53 | prev.next = next; 54 | next.prev = prev; 55 | this.nodeHash.delete(key); 56 | } 57 | 58 | LRUCache.prototype.get = function(key) { 59 | const node = this.nodeHash.get(key); 60 | 61 | // check if node does not exist in nodeHash obj 62 | if (!node) return null; 63 | // update position to most recent in list 64 | this.removeNode(node); 65 | this.addNode(node); 66 | return node.value; 67 | } 68 | 69 | // used by wTinyLFU to get SLRU eviction candidates for TinyLFU decision 70 | LRUCache.prototype.getCandidate = function () { 71 | const tempHead = this.head.next; 72 | 
this.removeNode(tempHead); 73 | this.nodeHash.delete(tempHead.key); 74 | return {key: tempHead.key, value: tempHead.value}; 75 | } 76 | 77 | LRUCache.prototype.put = function (key, value) { 78 | // create a new node 79 | const newNode = new Node(key, value); 80 | 81 | // remove node from old position 82 | const node = this.nodeHash.get(key); 83 | if (node) this.removeNode(node); 84 | 85 | // add new node to tail 86 | this.addNode(newNode); 87 | this.nodeHash.set(key, newNode); 88 | 89 | // check capacity - if over capacity, remove and reassign head node 90 | if (this.nodeHash.size > this.capacity){ 91 | const tempHead = this.head.next; 92 | this.removeNode(tempHead); 93 | this.nodeHash.delete(tempHead.key); 94 | // return tempHead for use in w-TinyLFU's SLRU cache 95 | return {key: tempHead.key, value: tempHead.value}; 96 | } 97 | } 98 | 99 | -------------------------------------------------------------------------------- /src/Browser/insertTypenames.js: -------------------------------------------------------------------------------- 1 | /** @format */ 2 | 3 | // this function will insert __typename meta fields into a querystring 4 | export function insertTypenames(queryOperationStr) { 5 | let newQueryStr = ""; 6 | // removes extra whitespace 7 | const queryStr = queryOperationStr.replace(/\s\s+/g, " ").trim(); 8 | // finds end of operation name by finding the beginning of the query strings 9 | const startIndex = queryStr.indexOf("{"); 10 | // adds the operation name to newQueryStr 11 | const operationName = queryStr.substring(0, startIndex + 1); 12 | newQueryStr += operationName; 13 | // iterate through query until you find beginning of field object 14 | let bracePairs = 0; 15 | let parensPairs = 0; 16 | for (let i = startIndex + 1; i < queryStr.length; i += 1) { 17 | const char = queryStr[i]; 18 | // functionality when the beginning of fields Obj is found 19 | if (char === "{" && !bracePairs && !parensPairs) { 20 | const endOfFieldsStr = 
findClosingBrace(queryStr, i); 21 | const fieldsStr = queryStr.substring(i, endOfFieldsStr + 1); 22 | const fieldsStrWithTypenames = addTypenamesToFieldsStr(fieldsStr); 23 | newQueryStr += fieldsStrWithTypenames; 24 | i = endOfFieldsStr; 25 | continue; 26 | } 27 | // bracket/parens counter 28 | if (char === "{") bracePairs += 1; 29 | if (char === "}") bracePairs -= 1; 30 | if (char === "(") parensPairs += 1; 31 | if (char === ")") parensPairs -= 1; 32 | // adds current character to newQueryString 33 | newQueryStr += char; 34 | } 35 | return newQueryStr; 36 | } 37 | 38 | // helper function to add typenames to fieldsStr where needed 39 | export function addTypenamesToFieldsStr(fieldsStr) { 40 | let newFieldsStr = fieldsStr; 41 | let currentOpenBrace = 0; 42 | let isAnotherOpenBrace = true; 43 | while (isAnotherOpenBrace) { 44 | // find the next open brace 45 | let nextOpenBrace = newFieldsStr.indexOf("{", currentOpenBrace + 1); 46 | if (nextOpenBrace === -1) isAnotherOpenBrace = false; 47 | const nextTypenameIndex = newFieldsStr.indexOf( 48 | "__typename", 49 | currentOpenBrace 50 | ); 51 | // check to see if __typename is between the current open brace and the next open brace 52 | if ( 53 | (nextTypenameIndex > nextOpenBrace && nextOpenBrace !== -1) || 54 | nextTypenameIndex === -1 55 | ) { 56 | // inserts __typename after currentOpenBrace 57 | newFieldsStr = 58 | newFieldsStr.substring(0, currentOpenBrace + 1) + 59 | " __typename " + 60 | newFieldsStr.substring(currentOpenBrace + 1); 61 | // updates nextOpenBrace after insertion 62 | nextOpenBrace = newFieldsStr.indexOf("{", currentOpenBrace + 1); 63 | } 64 | currentOpenBrace = nextOpenBrace; 65 | } 66 | return newFieldsStr; 67 | } 68 | 69 | // helper function to find the partner closing brace 70 | export function findClosingBrace(str, index) { 71 | let bracePairs = 0; 72 | // skips ahead 1 index to skip first brace 73 | for (let i = index + 1; i < str.length; i += 1) { 74 | const char = str[i]; 75 | if (char === 
"}" && !bracePairs) return i; 76 | if (char === "{") bracePairs += 1; 77 | if (char === "}") bracePairs -= 1; 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /test_files/test_variables/garbage_collection_variables.ts: -------------------------------------------------------------------------------- 1 | export const test = { 2 | cache: { 3 | ROOT_QUERY: { 4 | 'actor(id:1)': 'Actor~1', 5 | movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'], 6 | actors: ['Actor~1', 'Actor~2', 'Actor~3', 'Actor~4', 'Actor~6'], 7 | 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4', 'Movie~5'], 8 | }, 9 | ROOT_MUTATION: {}, 10 | 'Movie~1': { 11 | id: '1', 12 | title: 'Indiana Jones and the Last Crusade', 13 | actors: ['Actor~1', 'Actor~2', 'Actor~6', 'Actor~7'], 14 | genre: 'ACTION', 15 | releaseYear: 1989, 16 | }, 17 | 'Movie~2': { 18 | id: '2', 19 | title: 'Empire Strikes Back', 20 | actors: ['Actor~1', 'Actor~3'], 21 | releaseYear: 1980, 22 | }, 23 | 'Movie~3': { 24 | id: '3', 25 | title: 'Witness', 26 | actors: ['Actor~1', 'Actor~4'], 27 | releaseYear: 1985, 28 | }, 29 | 'Movie~4': { 30 | id: '4', 31 | title: 'Air Force One', 32 | actors: ['Actor~1', 'Actor~5'], 33 | genre: 'ACTION', 34 | releaseYear: 1997, 35 | }, 36 | 'Movie~5': 'DELETED', 37 | 'Actor~1': { id: '1', firstName: 'Harrison' }, 38 | 'Actor~2': { id: '2', firstName: 'Sean' }, 39 | 'Actor~3': { id: '3', firstName: 'Mark' }, 40 | 'Actor~4': { id: '4', firstName: 'Patti' }, 41 | 'Actor~5': { id: '5', firstName: 'Gary' }, 42 | 'Actor~6': 'DELETED', 43 | 'Actor~7': { id: '7', firstName: 'Christy' } 44 | }, 45 | 'badHashesSet': ['Movie~5', 'Actor~6'], 46 | 'goodHashesSet': ['Actor~1', 'Movie~1', 'Movie~2', 'Movie~3', 'Movie~4', 'Actor~2', 'Actor~3', 'Actor~4'], 47 | 'cleanedRootQuery': { 48 | 'actor(id:1)': 'Actor~1', 49 | movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'], 50 | actors: ['Actor~1', 'Actor~2', 'Actor~3', 'Actor~4'], 51 | 'movies(input:{genre:ACTION})': 
['Movie~1', 'Movie~4'], 52 | }, 53 | 'getGoodHashes': ['Actor~1', 'Actor~2', 'Actor~3', 'Actor~4', 'Actor~5', 'Actor~7', 'Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'], 54 | 'removeInaccessibleHashes': { 55 | ROOT_QUERY: { 56 | 'actor(id:1)': 'Actor~1', 57 | movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'], 58 | actors: ['Actor~1', 'Actor~2', 'Actor~3', 'Actor~4'], 59 | 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'], 60 | }, 61 | ROOT_MUTATION: {}, 62 | 'Movie~1': { 63 | id: '1', 64 | title: 'Indiana Jones and the Last Crusade', 65 | actors: ['Actor~1', 'Actor~2', 'Actor~7'], 66 | genre: 'ACTION', 67 | releaseYear: 1989, 68 | }, 69 | 'Movie~2': { 70 | id: '2', 71 | title: 'Empire Strikes Back', 72 | actors: ['Actor~1', 'Actor~3'], 73 | releaseYear: 1980, 74 | }, 75 | 'Movie~3': { 76 | id: '3', 77 | title: 'Witness', 78 | actors: ['Actor~1', 'Actor~4'], 79 | releaseYear: 1985, 80 | }, 81 | 'Movie~4': { 82 | id: '4', 83 | title: 'Air Force One', 84 | actors: ['Actor~1', 'Actor~5'], 85 | genre: 'ACTION', 86 | releaseYear: 1997, 87 | }, 88 | 'Actor~1': { id: '1', firstName: 'Harrison' }, 89 | 'Actor~2': { id: '2', firstName: 'Sean' }, 90 | 'Actor~3': { id: '3', firstName: 'Mark' }, 91 | 'Actor~4': { id: '4', firstName: 'Patti' }, 92 | 'Actor~5': { id: '5', firstName: 'Gary' }, 93 | 'Actor~7': { id: '7', firstName: 'Christy' } 94 | } 95 | }; 96 | -------------------------------------------------------------------------------- /test_files/test_variables/transformResponse_variables.ts: -------------------------------------------------------------------------------- 1 | export const test = { 2 | hashableKeys: ["__typename", "id"], 3 | queryKey: "{\n movies(input: {genre: ACTION}) {\n id\n __typename\n title\n releaseYear\n genre\n actors {\n id\n __typename\n firstName\n lastName\n }\n }\n}\n", 4 | detransformedResponse_nested: 5 | { 6 | data: { 7 | movies: [ 8 | { 9 | __typename: 'Movie', 10 | id: '1', 11 | title: 'Indiana Jones and the Last Crusade', 12 | genre: 
'ACTION', 13 | actors: [ 14 | { 15 | __typename: 'Actor', 16 | id: '1', 17 | firstName: 'Harrison', 18 | lastName: 'Ford', 19 | }, 20 | { 21 | __typename: 'Actor', 22 | id: '2', 23 | firstName: 'Sean', 24 | lastName: 'Connery', 25 | }, 26 | ], 27 | }, 28 | { 29 | __typename: 'Movie', 30 | id: '4', 31 | title: 'Air Force One', 32 | genre: 'ACTION', 33 | actors: [ 34 | { 35 | __typename: 'Actor', 36 | id: '1', 37 | firstName: 'Harrison', 38 | lastName: 'Ford', 39 | }, 40 | { 41 | __typename: 'Actor', 42 | id: '5', 43 | firstName: 'Gary', 44 | lastName: 'Oldman', 45 | }, 46 | ], 47 | }, 48 | ] 49 | } 50 | }, 51 | detransformedResponse_notnested: 52 | { 53 | data: { 54 | movies: [ 55 | { 56 | __typename: 'Movie', 57 | id: '1', 58 | title: 'Indiana Jones and the Last Crusade', 59 | genre: 'ACTION', 60 | }, 61 | { 62 | __typename: 'Movie', 63 | id: '4', 64 | title: 'Air Force One', 65 | genre: 'ACTION' 66 | }, 67 | { 68 | __typename: 'Movie', 69 | id: '5', 70 | title: 'Die Hard', 71 | genre: 'ACTION' 72 | } 73 | ] 74 | } 75 | }, 76 | transformedResponse_nested: 77 | { 78 | "~Movie~1": { 79 | "~Actor~1": {}, 80 | "~Actor~2": {} 81 | }, 82 | "~Movie~4": { 83 | "~Actor~1": {}, 84 | "~Actor~5": {} 85 | }, 86 | }, 87 | transformedResponse_notnested: 88 | { 89 | "~Movie~1": {}, 90 | "~Movie~4": {}, 91 | "~Movie~5": {} 92 | }, 93 | writeHashes: 94 | ['~Movie~1', '~Movie~4', '~Movie~5', '~Actor~1', '~Actor~2', '~Actor~5'], 95 | writeData: 96 | [ 97 | { 98 | __typename: 'Movie', 99 | id: '1', 100 | title: 'Indiana Jones and the Last Crusade', 101 | genre: 'ACTION', 102 | }, 103 | { 104 | __typename: 'Movie', 105 | id: '4', 106 | title: 'Air Force One', 107 | genre: 'ACTION' 108 | }, 109 | { 110 | __typename: 'Movie', 111 | id: '5', 112 | title: 'Die Hard', 113 | genre: 'ACTION' 114 | }, 115 | { 116 | __typename: 'Actor', 117 | id: '1', 118 | firstName: 'Harrison', 119 | lastName: 'Ford', 120 | }, 121 | { 122 | __typename: 'Actor', 123 | id: '2', 124 | firstName: 'Sean', 125 | 
lastName: 'Connery', 126 | }, 127 | { 128 | __typename: 'Actor', 129 | id: '5', 130 | firstName: 'Gary', 131 | lastName: 'Oldman', 132 | } 133 | ] 134 | } -------------------------------------------------------------------------------- /test_files/rhum_test_files/readCache_test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * NOTES: 3 | * 1.This file will test the read method on the Cache class functionalities: 4 | * Should return a graphql response object if all required values are found in the cache. 5 | * Should return u;ndefined if any field is missing value in the cache. 6 | * Should accept multiple queries in one query operation. 7 | * Should ignore the elements with a 'DELETE' value and not throw a cache miss if asked for in the query string 8 | * 2. This file will test populateAllHashes functionalities: 9 | * Should return undefined if any field is missing from the cache. 10 | * Should return an array of field objects if all the elements are found in the cache. 
11 | */ 12 | 13 | import Cache from '../../src/Browser/CacheClassBrowser.js'; 14 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 15 | import { test } from '../test_variables/readCache_variables.ts'; 16 | 17 | Rhum.testPlan('read method on Cache class', () => { 18 | Rhum.testSuite('read()', () => { 19 | Rhum.testCase( 20 | '\n *** \n readCache_test \n should return a graphql response object if all required values are found in the cache', 21 | async () => { 22 | const cache = new Cache(test.cache); 23 | const result = await cache.read(test.singularInputQuery); 24 | Rhum.asserts.assertEquals(result, test.singularQueryResObj); 25 | } 26 | ); 27 | Rhum.testCase( 28 | 'should return undefined if any field is missing a value in the cache', 29 | async () => { 30 | const cache = new Cache(test.cache); 31 | const result = await cache.read(test.undefinedInputQuery); 32 | Rhum.asserts.assertEquals(result, undefined); 33 | } 34 | ); 35 | Rhum.testCase( 36 | 'should accept multiple queries in one query operation', 37 | async () => { 38 | const cache = new Cache(test.cache); 39 | const result = await cache.read(test.multipleInputQuery); 40 | Rhum.asserts.assertEquals(result, test.multipleQueriesResObj); 41 | } 42 | ); 43 | Rhum.testCase( 44 | "should ignore the elements with a 'DELETE' value and not throw a cache miss if asked for in the query string", 45 | async () => { 46 | const cache = new Cache(test.cache); 47 | const result = await cache.read(test.queryStrDelete); 48 | Rhum.asserts.assertEquals(result, test.multipleQueriesResObj); 49 | } 50 | ); 51 | Rhum.testCase('should accept alias queries', async () => { 52 | const cache = new Cache(test.aliasCache); 53 | const result = await cache.read(test.aliasQueryString); 54 | Rhum.asserts.assertEquals(result, test.aliasResObj); 55 | }); 56 | }); 57 | 58 | Rhum.testSuite('populateAllHashes()', () => { 59 | Rhum.testCase( 60 | 'should return undefined if any field is missing from the cache', 61 | async () => { 62 | 
const cache = new Cache(test.cache); 63 | const result = await cache.populateAllHashes( 64 | ['Actor~1'], 65 | test.fieldsUndefined 66 | ); 67 | Rhum.asserts.assertEquals(result, undefined); 68 | } 69 | ); 70 | Rhum.testCase( 71 | 'should return an array of field objects if all the elements are found in the cache', 72 | async () => { 73 | const cache = new Cache(test.cache); 74 | const result = await cache.populateAllHashes( 75 | ['Actor~1'], 76 | test.fieldsComplete 77 | ); 78 | Rhum.asserts.assertEquals(result, [ 79 | { 80 | __typename: 'Actor', 81 | id: '1', 82 | firstName: 'Harrison', 83 | }, 84 | ]); 85 | } 86 | ); 87 | }); 88 | }); 89 | Rhum.run(); 90 | // TO RUN TEST: deno test test_files/rhum_test_files/readCache_test.ts --allow-env 91 | -------------------------------------------------------------------------------- /src/restructure.ts: -------------------------------------------------------------------------------- 1 | import { gql } from "https://deno.land/x/oak_graphql@0.6.4/mod.ts"; 2 | 3 | import { print, visit } from "https://deno.land/x/graphql_deno@v15.0.0/mod.ts"; 4 | 5 | /** 6 | * The restructure function: 7 | * - it converts the query string into an AST with the visitor pattern design. 8 | * - it handles fragments. 
9 | * - it handles 10 | * 11 | * @param {any} value - Query string 12 | * @return {string} string 13 | */ 14 | export function restructure(value: any) { 15 | const variables = value.variables || {}; 16 | const operationName = value.operationName; 17 | 18 | let ast = gql(value.query); 19 | 20 | let fragments: { [key: string]: any } = {}; 21 | let containsFrags: boolean = false; 22 | let existingFrags: { [key: string]: any } = {}; 23 | let existingVars: { [key: string]: any } = {}; 24 | 25 | const buildFragsVisitor = { 26 | FragmentDefinition: (node: any) => { 27 | fragments[node.name.value] = node.selectionSet.selections; 28 | }, 29 | }; 30 | const buildDefaultVarsVisitor = { 31 | VariableDefinition: (node: any) => { 32 | if (node.defaultValue) { 33 | if (!variables[node.variable.name.value]) { 34 | variables[node.variable.name.value] = node.defaultValue.value; 35 | } 36 | } 37 | }, 38 | }; 39 | 40 | const rewriteVarsVistor = { 41 | VariableDefinition: (node: any) => { 42 | return null; 43 | }, 44 | Variable: (node: any) => { 45 | if (variables.hasOwnProperty(node.name.value)) { 46 | return { kind: 'EnumValue', value: variables[node.name.value] }; 47 | } 48 | }, 49 | }; 50 | 51 | const rewriteVisitor = { 52 | FragmentSpread: (node: any) => { 53 | if (fragments.hasOwnProperty(node.name.value)) { 54 | return fragments[node.name.value]; 55 | } 56 | }, 57 | }; 58 | 59 | const clearFragVisitor = { 60 | FragmentDefinition: (node: any) => { 61 | if (fragments.hasOwnProperty(node.name.value)) { 62 | return null; 63 | } 64 | }, 65 | }; 66 | const checkFragmentationVisitor = { 67 | FragmentSpread: (node: any) => { 68 | containsFrags = true; 69 | existingFrags[node.name.value] = true; 70 | }, 71 | Variable: (node: any) => { 72 | containsFrags = true; 73 | existingVars[node.name.value] = true; 74 | }, 75 | }; 76 | 77 | const firstBuildVisitor = { 78 | ...buildFragsVisitor, 79 | ...buildDefaultVarsVisitor, 80 | }; 81 | 82 | const firstRewriteVisitor = { 83 | ...rewriteVisitor, 
84 | ...rewriteVarsVistor, 85 | OperationDefinition: (node: any) => { 86 | if (operationName && node.name.value != operationName) { 87 | return null; 88 | } 89 | }, 90 | InlineFragment: (node: any) => { 91 | return [ 92 | { 93 | kind: 'Field', 94 | alias: undefined, 95 | name: { kind: 'Name', value: '__typename' }, 96 | arguments: [], 97 | directives: [], 98 | selectionSet: undefined, 99 | }, 100 | node, 101 | ]; 102 | }, 103 | }; 104 | 105 | visit(ast, { leave: firstBuildVisitor }); 106 | 107 | ast = gql(print(visit(ast, { leave: firstRewriteVisitor }))); 108 | visit(ast, { leave: checkFragmentationVisitor }); 109 | while (containsFrags) { 110 | containsFrags = false; 111 | fragments = {}; 112 | visit(ast, { enter: buildFragsVisitor }); 113 | 114 | ast = gql(print(visit(ast, { leave: firstRewriteVisitor }))); 115 | visit(ast, { leave: checkFragmentationVisitor }); 116 | 117 | //if existingFrags has a key that fragments does not 118 | const exfragskeys = Object.keys(existingFrags); 119 | const fragskeys = Object.keys(fragments); 120 | const exvarsskeys = Object.keys(existingVars); 121 | const varkeys = Object.keys(variables); 122 | //exfragskeys.every(key=>fragskeys.includes(key)) 123 | if (!exfragskeys.every((key) => fragskeys.includes(key))) { 124 | return console.log({ error: 'missing fragment definitions' }); 125 | } 126 | if (!exvarsskeys.every((key) => varkeys.includes(key))) { 127 | return console.log({ error: 'missing variable definitions' }); 128 | } 129 | } 130 | 131 | ast = visit(ast, { leave: clearFragVisitor }); 132 | 133 | return print(ast); 134 | } 135 | -------------------------------------------------------------------------------- /test_files/test_variables/lfuBrowserCache_variables.ts: -------------------------------------------------------------------------------- 1 | export const test: { [index: string]: any } = { 2 | nestedObj: { 3 | queryStr: `query movie { 4 | Movie(id:1) { 5 | __typename 6 | id 7 | title 8 | actors { 9 | __typename 10 | id 
11 | firstName 12 | lastName 13 | } 14 | }}`, 15 | rootQuery: { 'Movie(id:1)': ['Movie~1'] }, 16 | expectedCache: { 17 | 'Movie~1': { 18 | title: 'Indiana Jones and the Last Crusade', 19 | id: '1', 20 | actors: ['Actor~1', 'Actor~2'], 21 | }, 22 | 'Actor~1': { 23 | id: '1', 24 | firstName: 'Harrison', 25 | lastName: 'Ford', 26 | }, 27 | 'Actor~2': { 28 | id: '2', 29 | firstName: 'Sean', 30 | lastName: 'Connery', 31 | }, 32 | }, 33 | respObj: { 34 | data: { 35 | Movie: [ 36 | { 37 | __typename: 'Movie', 38 | id: '1', 39 | title: 'Indiana Jones and the Last Crusade', 40 | actors: [ 41 | { 42 | __typename: 'Actor', 43 | id: '1', 44 | firstName: 'Harrison', 45 | lastName: 'Ford', 46 | }, 47 | { 48 | __typename: 'Actor', 49 | id: '2', 50 | firstName: 'Sean', 51 | lastName: 'Connery', 52 | }, 53 | ], 54 | }, 55 | ], 56 | }, 57 | }, 58 | }, 59 | LFUObj: { 60 | queryStr1: `{ 61 | Actors(movieID:1) { 62 | __typename 63 | id 64 | firstName 65 | lastName 66 | }}`, 67 | queryStr2: `{ 68 | Actors(movieID:2) { 69 | __typename 70 | id 71 | firstName 72 | lastName 73 | }}`, 74 | respObj1: { 75 | data: { 76 | Actors: [ 77 | { 78 | __typename: 'Actor', 79 | id: '1', 80 | firstName: 'Harrison', 81 | lastName: 'Ford', 82 | }, 83 | { 84 | __typename: 'Actor', 85 | id: '2', 86 | firstName: 'Sean', 87 | lastName: 'Connery', 88 | }, 89 | { 90 | __typename: 'Actor', 91 | id: '3', 92 | firstName: 'Nhan', 93 | lastName: 'Ly', 94 | }, 95 | { 96 | __typename: 'Actor', 97 | id: '4', 98 | firstName: 'Mark', 99 | lastName: 'Hammill', 100 | }, 101 | { 102 | __typename: 'Actor', 103 | id: '5', 104 | firstName: 'Christy', 105 | lastName: 'Gomez', 106 | }, 107 | ], 108 | }, 109 | }, 110 | respObj2: { 111 | data: { 112 | Actors: [ 113 | { 114 | __typename: 'Actor', 115 | id: '6', 116 | firstName: 'James Earl', 117 | lastName: 'Jones', 118 | }, 119 | ], 120 | }, 121 | }, 122 | expectedCache1: { 123 | 'Actor~1': { 124 | id: '1', 125 | firstName: 'Harrison', 126 | lastName: 'Ford', 127 | }, 128 | 
'Actor~2': { 129 | id: '2', 130 | firstName: 'Sean', 131 | lastName: 'Connery', 132 | }, 133 | 'Actor~3': { 134 | id: '3', 135 | firstName: 'Nhan', 136 | lastName: 'Ly', 137 | }, 138 | 'Actor~4': { 139 | id: '4', 140 | firstName: 'Mark', 141 | lastName: 'Hammill', 142 | }, 143 | 'Actor~5': { 144 | id: '5', 145 | firstName: 'Christy', 146 | lastName: 'Gomez', 147 | }, 148 | }, 149 | expectedCache2: { 150 | 'Actor~6': { 151 | id: '6', 152 | firstName: 'James Earl', 153 | lastName: 'Jones', 154 | }, 155 | 'Actor~2': { 156 | id: '2', 157 | firstName: 'Sean', 158 | lastName: 'Connery', 159 | }, 160 | 'Actor~3': { 161 | id: '3', 162 | firstName: 'Nhan', 163 | lastName: 'Ly', 164 | }, 165 | 'Actor~4': { 166 | id: '4', 167 | firstName: 'Mark', 168 | lastName: 'Hammill', 169 | }, 170 | 'Actor~5': { 171 | id: '5', 172 | firstName: 'Christy', 173 | lastName: 'Gomez', 174 | }, 175 | }, 176 | }, 177 | }; 178 | -------------------------------------------------------------------------------- /test_files/test_variables/transformResponseLight.ts: -------------------------------------------------------------------------------- 1 | // need redis v0.23.2 to be compatible with Deno testing. 
That is why we need to separate transformResponseLight.ts from transformResponse.ts 2 | 3 | import { isHashableObject, containsHashableObject, hashMaker } from '../../src/normalize.ts'; 4 | import { GenericObject } from '../../src/normalize.ts'; 5 | import { Cache } from './quickCacheLight.js' 6 | const cache = new Cache; 7 | 8 | const isArrayOfHashableObjects = (arrayOfObjects: Array, hashableKeys: Array):boolean => { 9 | if (Array.isArray(arrayOfObjects)) { 10 | return arrayOfObjects.every(object => { 11 | return containsHashableObject(object, hashableKeys); 12 | }) 13 | } 14 | return false; 15 | } 16 | 17 | /* ----------------------------------------------------------------*/ 18 | /** transformResponse 19 | * Returns a nested object representing an object of references, where the references are hashes in Redis. The responseObject input must: 20 | * 1) Contain hashable object(s) 21 | * 2) have a first key of 'data', as should all GraphQL response objects 22 | * 3) have an inner array of data response objects corresponding to the GraphQL fields 23 | * 24 | * @param {GenericObject} responseObject GraphQL response Object for large read query 25 | * @param {array} hashableKeys Array of hashable keys 26 | * @return {GenericObject} Nested object representing an object of references, where the references are hashes in Redis 27 | */ 28 | export const transformResponse = (responseObject: any, hashableKeys: Array):GenericObject => { 29 | const result: GenericObject = {}; 30 | 31 | if (responseObject.data) { 32 | return transformResponse(responseObject.data, hashableKeys); 33 | } else if (isHashableObject(responseObject, hashableKeys)) { 34 | return result; 35 | } else { 36 | for (const key in responseObject) { 37 | if (isArrayOfHashableObjects(responseObject[key], hashableKeys)) { 38 | for (const element of responseObject[key]) { 39 | let hash = hashMaker(element, hashableKeys); 40 | result[hash] = transformResponse(element, hashableKeys); 41 | } 42 | } 43 | } 44 | } 45 | 
return result; 46 | } 47 | 48 | 49 | /* ----------------------------------------------------------------*/ 50 | /** detransformResponse 51 | * Returns a nested object representing the original graphQL response object for a given queryKey 52 | * @param {String} queryKey String representing the stringified GraphQL query for a big read query, which should have been saved as a key in Redis 53 | * @param {GenericObject} transformedValue Nested object representing of references, where the references are hashes in Redis 54 | * @return {GenericObject} Nested object representing the original graphQL response object for a given queryKey 55 | */ 56 | export const detransformResponse = async (queryKey: String, transformedValue: GenericObject):Promise => { 57 | // remove all text within parentheses aka '(input: ...)' 58 | queryKey = queryKey.replace(/\(([^)]+)\)/, ''); 59 | // save Regex matches for line break followed by '{' 60 | const matches = [...queryKey.matchAll(/\n([^\n]+)\{/g)]; 61 | 62 | // get fields of query 63 | const fields: Array = []; 64 | matches.forEach(match => { 65 | fields.push(match[1].trim()); 66 | }); 67 | const recursiveDetransform = async (transformedValue: GenericObject, fields: Array, depth: number = 0):Promise => { 68 | const result: GenericObject = {}; 69 | let currDepth = depth; 70 | 71 | console.log('tv-> ', transformedValue); 72 | // base case: innermost object with key:value pair of hash:{} 73 | if (Object.keys(transformedValue).length === 0) { 74 | return result; 75 | } else { 76 | let currField: string = fields[currDepth]; 77 | result[currField] = []; 78 | 79 | for (let hash in transformedValue) { 80 | console.log('hash -> ', hash); 81 | const redisValue: GenericObject = await cache.cacheReadObject(hash); 82 | console.log('redisVal -> ', redisValue); 83 | // edge case in which our eviction strategy has pushed partial Cache data out of Redis 84 | if (!redisValue) { 85 | return {'cache evicted': {}}; 86 | } 87 | 88 | 
result[currField].push(redisValue); 89 | 90 | result[currField][result[currField].length - 1] = Object.assign( 91 | result[currField][result[currField].length - 1], 92 | await recursiveDetransform(transformedValue[hash], fields, depth = currDepth + 1) 93 | ) 94 | } 95 | return result; 96 | } 97 | } 98 | const detransformedResult: GenericObject = {'data' : {}}; 99 | detransformedResult.data = await recursiveDetransform(transformedValue, fields); 100 | console.log('dt-> ', detransformedResult); 101 | return detransformedResult; 102 | } -------------------------------------------------------------------------------- /test_files/rhum_test_files/serverNormalize_test.js: -------------------------------------------------------------------------------- 1 | import { assert, equal, assertStrictEquals, assertEquals } from "https://deno.land/std/testing/asserts.ts"; 2 | import { containsHashableObject, isHashableObject, hashMaker, printHashableObject, normalizeObject } from '../../src/normalize.ts'; 3 | import { serverNormalizeTestVariables as data } from '../test_variables/serverNormalize_variables.ts'; 4 | const arrOfHashableKeys = ['id', '__typename']; 5 | 6 | // containsHashableObject 7 | Deno.test('normalize.ts - cointainsHashableObject - True test 1: object with hashable key properties and a nested object', () => { 8 | assert(containsHashableObject(data.containsHashableObjTrue1, arrOfHashableKeys) === true); 9 | }) 10 | Deno.test('normalize.ts - cointainsHashableObject - True test 2: object with hashable key properties and an array of nested objects', () => { 11 | assert(containsHashableObject(data.containsHashableObjTrue2, arrOfHashableKeys) === true); 12 | }) 13 | Deno.test('normalize.ts - cointainsHashableObject - True test 3: object with hashable key properties with nested array', () => { 14 | assert(containsHashableObject(data.containsHashableObjTrue3, arrOfHashableKeys) === true); 15 | }) 16 | Deno.test('normalize.ts - cointainsHashableObject - False test 1: array 
// containsHashableObject — negative cases
Deno.test('normalize.ts - containsHashableObject - False test 1: array of hashable keys', () => {
  assert(containsHashableObject(data.containsHashableObjFalse1, arrOfHashableKeys) === false);
});
Deno.test('normalize.ts - containsHashableObject - False test 2: array nested with an object of hashable key properties', () => {
  assert(containsHashableObject(data.containsHashableObjFalse2, arrOfHashableKeys) === false);
});
Deno.test('normalize.ts - containsHashableObject - False test 3: object with nested object with hashable key properties', () => {
  assert(containsHashableObject(data.containsHashableObjFalse3, arrOfHashableKeys) === false);
});

// isHashableObject
Deno.test('normalize.ts - isHashableObject - True test 1: object with hashable key properties and no nesting', () => {
  assert(isHashableObject(data.isHashableObjTrue1, arrOfHashableKeys) === true);
});
Deno.test('normalize.ts - isHashableObject - True test 2: object with hashable key properties and no nesting', () => {
  assert(isHashableObject(data.isHashableObjTrue2, arrOfHashableKeys) === true);
});
Deno.test('normalize.ts - isHashableObject - False test 1: array of hashable keys', () => {
  assert(isHashableObject(data.isHashableObjFalse1, arrOfHashableKeys) === false);
});
Deno.test('normalize.ts - isHashableObject - False test 2: object with hashable key properties and nesting', () => {
  assert(isHashableObject(data.isHashableObjFalse2, arrOfHashableKeys) === false);
});

// hashMaker
Deno.test('normalize.ts - hashMaker - Creates unique hash when a hashable object is passed through', () => {
  assertStrictEquals(hashMaker(data.isHashableObjTrue1, arrOfHashableKeys), "~7~Movie");
});
Deno.test('normalize.ts - hashMaker - Creates unique hash when a hashable object is passed through', () => {
  assertStrictEquals(hashMaker(data.isHashableObjTrue2, arrOfHashableKeys), "~1~Actor");
});

// printHashableObject
Deno.test('normalize.ts - printHashableObject - Prints a hashable object when object with hashable key properties and no nesting is passed through', () => {
  assertEquals(printHashableObject(data.containsHashableObjTrue1), { "id": "11", "__typename": "Movie", "title": "Ad Astra" });
});
Deno.test('normalize.ts - printHashableObject - Prints a hashable object when object with hashable key properties and no nesting is passed through', () => {
  assertEquals(printHashableObject(data.containsHashableObjTrue2), { "id": "7", "__typename": "Movie", "title": "Ad Astra", "releaseYear": 2019, "genre": "SCIFI" });
});
Deno.test('normalize.ts - printHashableObject - Prints a hashable object when object with hashable key properties and no nesting is passed through', () => {
  assertEquals(printHashableObject(data.containsHashableObjTrue3), { "id": "1", "__typename": "Actor", "firstName": "Brad", "lastName": "Pitt" });
});

// normalizeObject
Deno.test('normalize.ts - normalizeObject - Constructs an object of reference caches. Key being the hash used as redis key and value being object that is stored in redis', () => {
  assertEquals(normalizeObject(data.scifiMovies, ["id", "__typename"]), data.scifiMoviesNormalized);
});

Deno.test('normalize.ts - normalizeObject - Constructs an object of reference caches. Key being the hash used as redis key and value being object that is stored in redis', () => {
  assertEquals(normalizeObject(data.arbitraryNestedScifiMovies, ["id", "__typename"]), data.arbitraryNestedScifiMoviesNormalized);
});

Deno.test('normalize.ts - normalizeObject - returns empty object if values in customIdentifier argument are not found in the nested response object', () => {
  assertEquals(normalizeObject(data.scifiMovies, ['identifier', 'uid', 'someType']), {});
});
import WTinyLFUCache from "../test_variables/wTinyLFU_variables.js";
import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts';

Rhum.testPlan('WTinyLFU cache functionality', () => {
  Rhum.testSuite('WTinyLFU Initialization', () => {
    // Window LRU gets 1% of capacity; the SLRU splits the remainder 20/80.
    Rhum.testCase('should initialize with correct capacities', () => {
      const cache = new WTinyLFUCache(1000);
      Rhum.asserts.assertEquals(cache.capacity, 1000);
      Rhum.asserts.assertEquals(cache.WLRU.capacity, 10);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.capacity, 198);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.capacity, 792);
    });
  });
  Rhum.testSuite('Window cache functionality', () => {
    Rhum.testCase('should add new item to the windowLRU when adding to WTLFU cache', () => {
      const cache = new WTinyLFUCache(100);
      cache.putAndPromote('one', 1);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), 1);
    });
    Rhum.testCase('should move items ejected from windowLRU into the probationaryLRU cache', async () => {
      const cache = new WTinyLFUCache(100);
      await cache.putAndPromote('one', 1);
      await cache.putAndPromote('two', 2);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.peek('one'), 1);
      Rhum.asserts.assertEquals(cache.WLRU.get('two'), 2);
    });
    Rhum.testCase('should promote items from probationaryLRU to the protectedLRU when accessed', async () => {
      const cache = new WTinyLFUCache(100);
      await cache.putAndPromote('one', 1);
      await cache.putAndPromote('two', 2);
      Rhum.asserts.assertEquals(cache.SLRU.get('one'), 1);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.peek('one'), 1);
    });
    Rhum.testCase('should demote items ejected from protectedLRU to probationary LRU', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.protectedLRU.capacity = 1;
      cache.SLRU.protectedLRU.put('one', 1);
      await cache.SLRU.putAndDemote('two', 2);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), 1);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('two'), 2);
    });
    Rhum.testCase('should move highest frequency item into full probationary cache', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.probationaryLRU.capacity = 1;
      await cache.putAndPromote('one', 1);
      await cache.putAndPromote('two', 2);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), 1);
      // Bias the frequency sketch so 'one' wins the admission contest.
      cache.sketch['one'] = 3;
      cache.sketch['two'] = 2;
      await cache.putAndPromote('three', 3);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), 1);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('two'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('three'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('two'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('three'), 3);
    });
    Rhum.testCase('should evict least recently used item from WLRU', async () => {
      const cache = new WTinyLFUCache(200);
      await cache.WLRU.put('one', 1);
      await cache.WLRU.put('two', 2);
      await cache.WLRU.put('three', 3);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('two'), 2);
      Rhum.asserts.assertEquals(cache.WLRU.get('three'), 3);
    });
    Rhum.testCase('should evict least recently used item from ProbationaryLRU', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.probationaryLRU.capacity = 2;
      await cache.SLRU.probationaryLRU.put('one', 1);
      await cache.SLRU.probationaryLRU.put('two', 2);
      await cache.SLRU.probationaryLRU.put('three', 3);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('two'), 2);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('three'), 3);
    });
    Rhum.testCase('should evict least recently used item from ProtectedLRU', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.protectedLRU.capacity = 2;
      await cache.SLRU.protectedLRU.put('one', 1);
      await cache.SLRU.protectedLRU.put('two', 2);
      await cache.SLRU.protectedLRU.put('three', 3);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('two'), 2);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('three'), 3);
    });
  });
});

Rhum.run();
/**
 * A 4-bit CountMin sketch used to estimate element popularity for TinyLFU
 * admission. Sixteen 4-bit counters are packed into each pair of 32-bit
 * integers; every element maps to four counters and its frequency is the
 * minimum of the four (capped at 15).
 *
 * NOTE(review): the original file ended with a stray top-level
 * `FrequencySketch();` call — in an ES module (strict mode) `this` is
 * undefined in a plain call, so the `this.*` assignments would throw on
 * import. The stray call has been removed; construct with `new`.
 */
export function FrequencySketch() {

  const RESET_MASK = 0x77777777; // 0111 repeated: clears the top bit of each 4-bit counter
  const ONE_MASK = 0x11111111;   // 0001 repeated: isolates the low bit of each 4-bit counter

  let sampleSize, blockMask, size;
  let table = [];

  /**
   * Initializes and increases the capacity of this FrequencySketch instance
   * so it can accurately estimate the popularity of data given the maximum
   * size of the cache. Frequency counts become zero when resizing.
   *
   * @param maxSize cache capacity
   */
  this.updateCapacity = function(maxSize) {
    const max = Math.floor(maxSize); // to ensure it's an integer
    if (table.length >= max) return;

    table = Array(Math.max(nearestPowerOfTwo(max), 8)).fill().map(() => Array(2).fill(0));
    sampleSize = (maxSize === 0) ? 10 : (10 * max);
    blockMask = (table.length >>> 3) - 1;

    if (sampleSize <= 0) sampleSize = Number.MAX_SAFE_INTEGER;
    size = 0;
  }

  /**
   * Returns true if the sketch has not been initialized, indicating
   * updateCapacity needs to be called before tracking frequencies.
   */
  const isNotInitialized = () => {
    return table.length === 0;
  }

  /**
   * Returns the estimated frequency of an element, up to the maximum (15).
   *
   * @param el the element being counted (hashed via its string characters)
   * @return the estimated frequency - required to be nonnegative
   */
  this.frequency = function(el) {
    if (isNotInitialized()) return 0;
    const count = Array(4);

    const blockHash = supphash(hashCode(el));
    const counterHash = rehash(blockHash);
    const block = (blockHash & blockMask) << 3;

    // Read the four counters this element maps to; the estimate is their min.
    for (let i = 0; i < 4; i++) {
      const h = counterHash >>> (i << 3);
      const index = (h >>> 1) & 15;
      const row = index % 2;
      const offset = h & 1;
      count[i] = ((table[block + offset + (i << 1)][row] >>> ((index >> 1) << 2)) & 15);
    }
    return Math.min(...count);
  }

  /**
   * Increment the frequency of the element if it does not exceed the
   * maximum (15). Triggers a halving reset once sampleSize increments have
   * been recorded, so stale popularity decays over time.
   *
   * @param el element to add
   */
  this.increment = function(el) {
    if (isNotInitialized()) return;

    const index = Array(8);
    const blockHash = supphash(hashCode(el));
    const counterHash = rehash(blockHash);
    const block = (blockHash & blockMask) << 3;

    for (let i = 0; i < 4; i++) {
      const h = counterHash >>> (i << 3);
      index[i] = (h >>> 1) & 15;
      const offset = h & 1;
      index[i + 4] = block + offset + (i << 1);
    }
    // Bitwise OR (not ||) so all four counters are attempted unconditionally.
    const incremented =
      incrementAt(index[4], index[0])
      | incrementAt(index[5], index[1])
      | incrementAt(index[6], index[2])
      | incrementAt(index[7], index[3]);
    if (incremented && (++size == sampleSize)) {
      reset();
    }
  }

  /**
   * Increments the specified counter by 1 if it is not already at the
   * maximum value (15).
   *
   * @param i the table index (16 counters)
   * @param j the counter to increment
   * @return if incremented
   */
  const incrementAt = (i, j) => {
    const row = j % 2;
    const offset = (j >> 1) << 2;
    const mask = (15 << offset);
    if ((table[i][row] & mask) != mask) { // if curr counter is not at maximum (15)
      table[i][row] += (1 << offset);
      return true;
    }
    return false;
  }

  /** Reduces every counter by half of its original value. */
  const reset = () => {
    let count = 0;
    for (let i = 0; i < table.length; i++) {
      count += bitCount(table[i][0] & ONE_MASK) + bitCount(table[i][1] & ONE_MASK);
      table[i][0] = (table[i][0] >>> 1) & RESET_MASK;
      table[i][1] = (table[i][1] >>> 1) & RESET_MASK;
    }
    size = (size - (count >>> 2)) >>> 1;
  }

  /** Applies a supplemental hash function for fewer collisions. */
  const supphash = x => {
    x ^= x >> 17;
    x *= 0xed5ad4bb;
    x ^= x >> 11;
    x *= 0xac4c1b51;
    x ^= x >> 15;
    return x;
  }

  /** Applies another round of hashing to achieve three-round hashing. */
  const rehash = x => {
    x *= 0x31848bab;
    x ^= x >> 14;
    return x;
  }

  /** Smallest power of two >= num (assumes num >= 1). */
  const nearestPowerOfTwo = num => {
    const exp = Math.floor(Math.log2(num));
    if (Math.pow(2, exp) === num) return num;

    return Math.pow(2, exp + 1);
  }

  /** Java-style 32-bit string hash of the element's characters. */
  const hashCode = (input) => {
    let hash, code;
    hash = 0;
    for (let i = 0; i < input.length; i++) {
      code = input.charCodeAt(i);
      hash = ((hash << 5) - hash) + code;
      hash = hash & hash; // force to 32-bit integer
    }
    return hash;
  }

  /** bitcounting for 32-bit integers (reference: https://graphics.stanford.edu/~seander/bithacks.html) */
  const bitCount = n => {
    n = n - ((n >> 1) & 0x55555555);
    n = (n & 0x33333333) + ((n >> 2) & 0x33333333);
    const count = ((n + (n >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
    return count;
  }
}
/** @format */

// Normalizes responses using the query object from destructure and the
// response object from the graphql request.

/**
 * Builds a flat, normalized cache representation of a GraphQL response.
 *
 * @param {object} queryObj - destructured query (has `queries` and/or `mutations` arrays)
 * @param {object} resultObj - GraphQL response ({ data: ... })
 * @param {boolean} [deleteFlag] - when truthy, mutated entities are marked "DELETED"
 * @return {object} map of entity hashes (e.g. "Movie~1") to field objects,
 *                  plus a ROOT_QUERY / ROOT_MUTATION entry of reference arrays
 */
export default function normalizeResult(queryObj, resultObj, deleteFlag) {
  // Object to hold normalized obj
  const result = {};
  // checks if there is a delete mutation
  if (deleteFlag) {
    // creates the ROOT_MUTATION entry for the delete mutation
    result["ROOT_MUTATION"] = createRootQuery(queryObj.mutations, resultObj);

    // iterate thru the different response objects that were mutated
    const obj = resultObj.data;
    // checks if the current element is an array
    if (Array.isArray(obj)) {
      // iterates thru the array of objects and stores each hash in the
      // result object with 'DELETED' as value
      obj.forEach((ele) => {
        const mutationKeys = Object.keys(ele);
        const hash = labelId(ele[mutationKeys[0]]);
        result[hash] = "DELETED";
      });
    } else {
      // else stores the hash in the result object with the value 'DELETED'
      const mutationKeys = Object.keys(obj);
      const hash = labelId(obj[mutationKeys[0]]);
      result[hash] = "DELETED";
    }
  }

  // creates a stringified version of query request and stores it in
  // ROOT_QUERY (or ROOT_MUTATION) key
  else if (queryObj.queries || queryObj.mutations) {
    if (queryObj.queries) {
      result["ROOT_QUERY"] = createRootQuery(queryObj.queries, resultObj);
    } else {
      result["ROOT_MUTATION"] = createRootQuery(queryObj.mutations, resultObj);
    }
    for (const curr in resultObj.data) {
      if (!Array.isArray(resultObj.data[curr])) {
        mergeHashes(result, createHash(resultObj.data[curr]));
      } else {
        for (let i = 0; i < resultObj.data[curr].length; i++) {
          // pass current obj to createHash to build its hash entries, then
          // merge them into the accumulated result
          mergeHashes(result, createHash(resultObj.data[curr][i]));
        }
      }
    }
  }
  return result;
}

// Merges each hash entry of `hashObj` into `result`: existing hashes have
// their fields combined, new hashes are added as-is.
function mergeHashes(result, hashObj) {
  for (const hash in hashObj) {
    if (result[hash]) {
      Object.assign(result[hash], hashObj[hash]);
    } else {
      result[hash] = hashObj[hash];
    }
  }
}

// creates the hashes for query requests and stores the reference hash(es)
// that will be stored in result under ROOT_QUERY / ROOT_MUTATION
function createRootQuery(queryObjArr, resultObj) {
  const output = {};
  queryObjArr.forEach((query) => {
    // if query has an alias declare it
    const alias = query.alias ?? null;
    const name = query.name;
    const args = query.arguments;
    const queryHash = name + args;
    const result = resultObj.data[alias] ?? resultObj.data[name];
    // iterate thru the array of current query response
    // and store the hash of each response in an array
    if (Array.isArray(result)) {
      const arrOfHashes = [];
      result.forEach((obj) => {
        arrOfHashes.push(labelId(obj));
      });

      // store the array of hashes associated with the queryHash
      output[queryHash] = arrOfHashes;
    } else {
      output[queryHash] = [labelId(result)];
    }
  });
  return output;
}

// returns hash/value pairs for each response obj passed in (recursing into
// nested objects and arrays of objects)
function createHash(obj, output = {}) {
  const hash = labelId(obj);
  // if output doesn't have a key of hash create a new obj with that hash key
  if (!output[hash]) output[hash] = {};
  // iterate thru the fields in the current obj; __typename is part of the
  // hash itself, so it is skipped as a field
  for (const field in obj) {
    if (field === "__typename") continue;
    // check whether current field is not an array
    if (!Array.isArray(obj[field])) {
      // nested object: store its reference hash and recurse
      if (typeof obj[field] === "object" && obj[field] !== null) {
        output[hash][field] = labelId(obj[field]);
        output = createHash(obj[field], output);
      } else {
        output[hash][field] = obj[field];
      }
    } // if it's an array of objects, store an array of reference hashes and
    // recurse into each element
    else {
      output[hash][field] = [];
      obj[field].forEach((obj) => {
        const arrayHash = labelId(obj);
        output[hash][field].push(arrayHash);
        output = createHash(obj, output);
      });
    }
  }
  return output;
}

// Builds the cache key "Typename~id" for an entity. Accepts several id
// spellings; yields "Typename~undefined" if none is present — callers are
// expected to supply an id field.
function labelId(obj) {
  const id = obj.id || obj.ID || obj._id || obj._ID || obj.Id || obj._Id;
  return obj.__typename + "~" + id;
}
| query getActorById { 55 | actor(id: 1) { 56 | __typename 57 | id 58 | firstName 59 | lastName 60 | } 61 | } 62 | `, 63 | multipleInputQuery: ` 64 | query AllActionMoviesAndAllActors { 65 | movies(input: { genre: ACTION }) { 66 | __typename 67 | id 68 | title 69 | genre 70 | actors { 71 | __typename 72 | id 73 | firstName 74 | } 75 | } 76 | actors { 77 | __typename 78 | id 79 | firstName 80 | } 81 | } 82 | } 83 | `, 84 | fieldsUndefined: { 85 | __typename: 'meta', 86 | id: 'scalar', 87 | firstName: 'scalar', 88 | lastName: 'scalar', 89 | }, 90 | fieldsComplete: { __typename: 'meta', id: 'scalar', firstName: 'scalar' }, 91 | singularQueryResObj: { 92 | data: { 93 | actor: [ 94 | { 95 | __typename: 'Actor', 96 | id: '1', 97 | firstName: 'Harrison', 98 | }, 99 | ], 100 | }, 101 | }, 102 | multipleQueriesResObj: { 103 | data: { 104 | movies: [ 105 | { 106 | __typename: 'Movie', 107 | id: '1', 108 | title: 'Indiana Jones and the Last Crusade', 109 | genre: 'ACTION', 110 | actors: [ 111 | { 112 | __typename: 'Actor', 113 | id: '1', 114 | firstName: 'Harrison', 115 | }, 116 | { 117 | __typename: 'Actor', 118 | id: '2', 119 | firstName: 'Sean', 120 | }, 121 | ], 122 | }, 123 | { 124 | __typename: 'Movie', 125 | id: '4', 126 | title: 'Air Force One', 127 | genre: 'ACTION', 128 | actors: [ 129 | { 130 | __typename: 'Actor', 131 | id: '1', 132 | firstName: 'Harrison', 133 | }, 134 | { 135 | __typename: 'Actor', 136 | id: '5', 137 | firstName: 'Gary', 138 | }, 139 | ], 140 | }, 141 | ], 142 | actors: [ 143 | { 144 | __typename: 'Actor', 145 | id: '1', 146 | firstName: 'Harrison', 147 | }, 148 | { __typename: 'Actor', id: '2', firstName: 'Sean' }, 149 | { __typename: 'Actor', id: '3', firstName: 'Mark' }, 150 | { __typename: 'Actor', id: '4', firstName: 'Patti' }, 151 | ], 152 | }, 153 | }, 154 | queryStrDelete: ` 155 | query AllActionMoviesAndAllActors { 156 | movies(input: { genre: ACTION }) { 157 | __typename 158 | id 159 | title 160 | genre 161 | actors { 162 | __typename 
/** @format */
import { gql } from "https://deno.land/x/oak_graphql@0.6.4/mod.ts";
import { visit } from "https://deno.land/x/graphql_deno@v15.0.0/mod.ts";
import { scope } from './Obsidian.ts';
import { Cache } from './quickCache.js';
import { deepEqual } from './utils.js';

// const cache = new Cache();

/**
 * Determines whether a GraphQL request is a mutation by walking its AST.
 * @param {any} gqlQuery - Object containing the query string
 * @return {boolean} isMutation - true when any operation definition in the document is a mutation
 */
export function isMutation(gqlQuery: { query: string }): boolean {
  let isMutation: boolean = false;
  let ast: any = gql(gqlQuery.query);

  // visitor flips the flag when an OperationDefinition node's operation is 'mutation'
  const checkMutationVisitor: object = {
    OperationDefinition: (node: { operation: string }) => {
      if (node.operation === 'mutation') {
        isMutation = true;
      }
    },
  };

  // left this piece of code in case someone decides to build upon subscriptions, but for now obsidian doesn't do anything with subscriptions
  const subscriptionTunnelVisitor = {
    OperationDefinition: (node: { operation: string }) => {
      if (node.operation === 'subscription') {
      }
    },
  };

  visit(ast, { enter: subscriptionTunnelVisitor, leave: checkMutationVisitor });
  return isMutation;
}

/**
 * Invalidates cache in redis based on the mutation type.
 * @param {object} normalizedMutation - Object containing hash val in redis as key and normalized object as value.
 * Ex: {
 *   ~7~Movie: {id: 7, __typename: Movie, title: Ad Astra, releaseYear: 2019}
 * }
 * @param {string} queryString - raw mutation query. Passed onto isDelete function
 * Ex: 'mutation { addMovie(input: {title: "sdfsdg", releaseYear: 1234, genre: ACTION }) { __typename id ti...'
 * @param {Record} mutationTableMap - dev-supplied map from mutation name to the data tables it affects.
 * NOTE(review): the type parameters appear to have been lost in transit — presumably
 * Record<string, string[]>; confirm against the repository source.
 * @return {void}
 */
export async function invalidateCache(
  normalizedMutation: { [key: string]: object },
  queryString: string,
  mutationTableMap: Record
) {
  let normalizedData: object;
  let cachedVal: any;

  // Common case is that we get one mutation at a time. But it's possible to group multiple mutation queries into one.
  // That's why the for loop is needed
  for (const redisKey in normalizedMutation) {
    normalizedData = normalizedMutation[redisKey];
    cachedVal = await scope.cache.cacheReadObject(redisKey);

    // if response objects from mutation and cache are deeply equal then we delete it from cache because it infers that it's a delete mutation
    // isDelete() is a keyword-based fallback for when the object was never cached or already evicted
    if (
      (cachedVal !== undefined && deepEqual(normalizedData, cachedVal)) ||
      isDelete(queryString)
    ) {
      await scope.cache.cacheDelete(redisKey);
    }
    else {
      // Otherwise it's an update or add mutation because response objects from mutation and cache don't match.
      // We overwrite the existing cache value or write new data if cache at that key doesn't exist
      // Edge case: update is done without changing any values... cache will be deleted from redis because the response obj and cached obj will be equal
      if (cachedVal === undefined) { // checks if add mutation
        let ast = gql(queryString);
        const mutationType =
          ast.definitions[0].selectionSet.selections[0].name.value; // Extracts mutationType from query string

        // NOTE(review): if mutationType is missing from mutationTableMap, staleRefs is
        // undefined and staleRefs.length below throws — verify callers always register
        // every mutation name in the map.
        const staleRefs: Array = mutationTableMap[mutationType]; // Grabs array of affected data tables from dev specified mutationTableMap

        // hgetall (deno-redis) returns a flat [field, value, field, value, ...] array,
        // hence the i += 2 stride below
        const rootQueryContents = await scope.cache.redis.hgetall('ROOT_QUERY'); // Creates array of all query keys and values in ROOT_QUERY from Redis

        for (let j = 0; j < staleRefs.length; j++) { // Checks for all query keys that refer to the affected tables and deletes them from Redis
          for (let i = 0; i < rootQueryContents.length; i += 2) {
            // prefix match: a query key like 'movies(input:...)' starts with table name 'movies'
            if (
              staleRefs[j] === rootQueryContents[i].slice(0, staleRefs[j].length)
            ) {
              scope.cache.redis.hdel('ROOT_QUERY', rootQueryContents[i]);
            }
          }
        }
      }
      await scope.cache.cacheWriteObject(redisKey, normalizedData); // Adds or updates reference in redis cache
    }
  }
}

/**
 * Returns a boolean that's used to decide on deleting a value from cache
 * @param {string} queryString - raw mutation query.
 * Ex: 'mutation { addMovie(input: {title: "sdfsdg", releaseYear: 1234, genre: ACTION }) { __typename id ti...'
 * @return {boolean} isDeleteFlag
 */
export function isDelete(queryString: string) {
  // Because we check if response object from delete mutation equals to cached object to determine if it's a delete mutation
  // but there may be instances that the object is evicted from cache or never cached previously which would be treated as add or update mutation
  // if we find any keywords we're looking for in the mutation query that infer deletion we force the deletion
  // NOTE(review): the Array annotation appears to have lost its type parameter (Array<string>)
  const deleteKeywords: Array = ['delete', 'remove'];
  let isDeleteFlag: boolean = false;

  for (const keyword of deleteKeywords) {
    const regex = new RegExp(keyword);
    // if query string contains any of the keywords in the deleteKeywords array we set the flag to true and break out of the loop
    if (queryString.search(regex) !== -1) {
      isDeleteFlag = true;
      break;
    }
  }
  return isDeleteFlag;
}
It must: 5 | * 1) Be an object 6 | * 2) Has all hashable keys 7 | * 8 | * @param {any} objectInQuestion Object being tested contains hashable object 9 | * @param {Array} hashableKeys Array of hashable keys 10 | * @return {boolean} Boolean indicating if objectInQuestion is hashable or not 11 | */ 12 | 13 | export const containsHashableObject = ( 14 | objectInQuestion: any, 15 | hashableKeys: Array 16 | ): boolean => { 17 | if ( 18 | typeof objectInQuestion !== 'object' || 19 | Array.isArray(objectInQuestion) || 20 | !objectInQuestion 21 | ) 22 | return false; 23 | const objectInQuestionKeysSet = new Set(Object.keys(objectInQuestion)); 24 | return hashableKeys.every((key) => objectInQuestionKeysSet.has(key)); 25 | }; 26 | /* ----------------------------------------------------------------*/ 27 | 28 | /* ----------------------------------------------------------------*/ 29 | /** isHashableObject - 30 | * Returns a boolean indicating that the passed in value is hashable. It must: 31 | * 1) Contain hashable object 32 | * 2) Does not have any nesting (i.e., contains no objects or array values) 33 | * 34 | * @param {any} objectInQuestion Object being tested if hashable 35 | * @param {Array} hashableKeys Array of hashable keys 36 | * @return {boolean} Boolean indicating if objectInQuestion is hashable or not 37 | */ 38 | export const isHashableObject = ( 39 | objectInQuestion: any, 40 | hashableKeys: Array 41 | ): boolean => { 42 | if (!containsHashableObject(objectInQuestion, hashableKeys)) return false; 43 | for (const key in objectInQuestion) { 44 | if (typeof objectInQuestion[key] === 'object') return false; 45 | } 46 | return true; 47 | }; 48 | /* ----------------------------------------------------------------*/ 49 | 50 | /* ----------------------------------------------------------------*/ 51 | export type GenericObject = { [key: string]: any }; 52 | type FlatObject = { [key: string]: string | number | boolean }; 53 | /** hashMaker - 54 | * Creates unique hash 
string for an object with hashable keys with hashable object passed in 55 | * 56 | * @param {FlatObject} hashableObject Object that is hashable 57 | * @param {Array} hashableKeys Array of hashable keys 58 | * @return {string} Hash string 59 | */ 60 | export const hashMaker = ( 61 | hashableObject: FlatObject, 62 | hashableKeys: Array 63 | ): string => { 64 | let hash = ''; 65 | for (let i = 0; i < hashableKeys.length; i++) { 66 | hash += hashableObject[hashableKeys[i]]; 67 | if (i < hashableKeys.length - 1) hash += '~' 68 | } 69 | return hash; 70 | }; 71 | /* ----------------------------------------------------------------*/ 72 | 73 | /* ----------------------------------------------------------------*/ 74 | /** printHashableObject - 75 | * Creates a hashable object from an object that contains a hashable object. Does not print hashable object 76 | * 77 | * @param {FlatObject} containsHashableObject Object that is hashable 78 | * @return {GenericObject} A hashable object 79 | */ 80 | export const printHashableObject = ( 81 | containsHashableObject: GenericObject 82 | ): GenericObject => { 83 | const hashObj: GenericObject = {}; 84 | for (const key in containsHashableObject) { 85 | if ( 86 | typeof containsHashableObject[key] !== 'object' && 87 | !hashObj.hasOwnProperty(key) 88 | ) 89 | hashObj[key] = containsHashableObject[key]; 90 | } 91 | return hashObj; 92 | }; 93 | /* ----------------------------------------------------------------*/ 94 | 95 | /* ----------------------------------------------------------------*/ 96 | 97 | /** 98 | * Recursively flattens an arbitrarily nested object into an objects with hash key and hashable object pairs 99 | * 100 | * For each key in object (typeof === 'object', meaning it can be array): 101 | * 102 | * 1) If current object contains hashable object and if it hasn't printed already, 103 | * it prints a hashable object and makes its associated hash. 
If hash doesn't exist in normalizedHashableObjects, 104 | * it adds hash key and hashable object pair. 105 | * 106 | * 2) If the value at the current key is an object (typeof === 'object', meaning it can be array), it recursively 107 | * calls normalizeObject with the value passed in. This recursive calls goes inside arbitrary nesting. 108 | * 109 | * 3) Return normalizedHashableObjects. In the outer most execution context, this will return the output of the function. 110 | * In inner execution contexts, this will return that execution context's normalizedHashableObjects. 111 | * 112 | * @param {GenericObject} nestedObject Nested object 113 | * @param {Array} hashableKeys Array of hashable keys 114 | * @return {FlatObject} Normalized object with hash keys and hashable object pairs 115 | */ 116 | export const normalizeObject = ( 117 | nestedObject: GenericObject, 118 | hashableKeys: Array, 119 | normalizedHashableObjects: GenericObject = {} 120 | ): GenericObject => { 121 | let hasAlreadyPrinted = false; 122 | for (const key in nestedObject) { 123 | if ( 124 | containsHashableObject(nestedObject, hashableKeys) && 125 | hasAlreadyPrinted === false 126 | ) { 127 | hasAlreadyPrinted = true; 128 | const hashableObject = printHashableObject(nestedObject); 129 | const hash = hashMaker(hashableObject, hashableKeys); 130 | if (!normalizedHashableObjects.hasOwnProperty(hash)) 131 | normalizedHashableObjects[hash] = hashableObject; 132 | } 133 | if (typeof nestedObject[key] === 'object') 134 | normalizeObject( 135 | nestedObject[key], 136 | hashableKeys, 137 | normalizedHashableObjects 138 | ); 139 | } 140 | return normalizedHashableObjects; 141 | }; 142 | -------------------------------------------------------------------------------- /test_files/rhum_test_files/destructure_test.ts: -------------------------------------------------------------------------------- 1 | import { Rhum } from 'https://deno.land/x/rhum@v1.1.11/mod.ts'; 2 | import destructureQueries, { 3 | 
findQueryStrings, 4 | createQueriesObj, 5 | splitUpQueryStr, 6 | findQueryFields, 7 | findClosingBrace, 8 | } from '../../src/Browser/destructure.js'; 9 | import { test } from '../test_variables/destructure_variables.ts'; 10 | 11 | Rhum.testPlan('destructure.ts', () => { 12 | Rhum.testSuite('destructure helper function tests', () => { 13 | Rhum.testCase('findQueryStrings test', () => { 14 | const results = findQueryStrings(test.findQueryStringsTestData); 15 | Rhum.asserts.assertEquals(test.findQueryStringsResultData, results); 16 | }); 17 | Rhum.testCase('createQueriesObj test', () => { 18 | const results = createQueriesObj( 19 | test.createQueriesObjTestData, 20 | 'queries' 21 | ); 22 | Rhum.asserts.assertEquals(test.createQueriesObjResultsData, results); 23 | }); 24 | Rhum.testCase('findQueryFields test', () => { 25 | const results = findQueryFields(test.findQueryFieldsTestData); 26 | Rhum.asserts.assertEquals(test.findQueryFieldsResultData, results); 27 | }); 28 | Rhum.testCase('findClosingBrace test', () => { 29 | const results = findClosingBrace(test.findClosingBraceTestData, 62); 30 | Rhum.asserts.assertEquals(test.findClosingBraceResultData, results); 31 | }); 32 | }); 33 | 34 | Rhum.testSuite('destructure single query tests', () => { 35 | Rhum.testCase('destructure single query string - no inputs', () => { 36 | const result = destructureQueries(test.ALL_ACTORS); 37 | Rhum.asserts.assertEquals(test.allActorsTestResult, result); 38 | }); 39 | Rhum.testCase('destructure single query string - inputs', () => { 40 | const result = destructureQueries(test.ALL_ACTION_MOVIES); 41 | Rhum.asserts.assertEquals(test.allActionTestResult, result); 42 | }); 43 | }); 44 | 45 | Rhum.testSuite('destructure multi query tests', () => { 46 | Rhum.testCase('destructure multi query - input / non input', () => { 47 | const result = destructureQueries(test.ALL_ACTION_MOVIES_AND_ALL_ACTORS); 48 | Rhum.asserts.assertEquals(test.allActionActorsTestResult, result); 49 | }); 50 | }); 51 
| 52 | Rhum.testSuite('destructure alias query tests', () => { 53 | Rhum.testCase('destructure multi alias query - input / non input', () => { 54 | const result = destructureQueries(test.newAliasTestQuery); 55 | Rhum.asserts.assertEquals(test.newAliasTestResult, result); 56 | }); 57 | }); 58 | 59 | Rhum.testSuite('destructure fragment tests', () => { 60 | Rhum.testCase( 61 | 'destructure fragment tests - results in two seperate queries', 62 | () => { 63 | const result = destructureQueries(test.fragmentTestData); 64 | Rhum.asserts.assertEquals(test.fragmentResultData, result); 65 | } 66 | ); 67 | Rhum.testCase('destructure fragment tests - results in one query', () => { 68 | const result = destructureQueries(test.fragmentTestData2); 69 | Rhum.asserts.assertEquals(test.fragmentResultData2, result); 70 | }); 71 | Rhum.testCase('destructure fragment tests - nested fragments', () => { 72 | const result = destructureQueries(test.fragmentTestData3); 73 | Rhum.asserts.assertEquals(test.fragmentResultData3, result); 74 | }); 75 | }); 76 | 77 | // single variable test 78 | Rhum.testSuite('destructure single variable query tests', () => { 79 | Rhum.testCase('destructure single variable query string', () => { 80 | const result = destructureQueries( 81 | test.singleVariableTestData, 82 | test.singleVariableTestValue 83 | ); 84 | Rhum.asserts.assertEquals(test.singleVariableTestResult, result); 85 | }); 86 | }); 87 | 88 | // multi variable test 89 | Rhum.testSuite('destructure multi variable query tests', () => { 90 | Rhum.testCase('destructure multi variable query', () => { 91 | const result = destructureQueries( 92 | test.multiVariableTestData, 93 | test.multiVariableTestValue 94 | ); 95 | Rhum.asserts.assertEquals(test.multiVariableTestResult, result); 96 | }); 97 | }); 98 | 99 | // single directive test - @include: true 100 | Rhum.testSuite('destructure @include directive query tests', () => { 101 | Rhum.testCase('destructure @include directive (true) query', () => { 102 | 
const result = destructureQueries( 103 | test.includeDirectiveTestData, 104 | test.includeDirectiveTrueValues 105 | ); 106 | Rhum.asserts.assertEquals(test.includeDirectiveTrueResult, result); 107 | }); 108 | }); 109 | 110 | // single directive test - @include: false 111 | Rhum.testSuite('destructure @include directive query tests', () => { 112 | Rhum.testCase('destructure @include directive (false) query', () => { 113 | const result = destructureQueries( 114 | test.includeDirectiveTestData, 115 | test.includeDirectiveFalseValues 116 | ); 117 | 118 | Rhum.asserts.assertEquals(test.includeDirectiveFalseResult, result); 119 | }); 120 | }); 121 | }); 122 | 123 | // single directive test - @skip: true 124 | Rhum.testSuite('destructure @skip directive query tests', () => { 125 | Rhum.testCase('destructure @skip directive (true) query', () => { 126 | const result = destructureQueries( 127 | test.skipDirectiveTestData, 128 | test.skipDirectiveTrueValues 129 | ); 130 | Rhum.asserts.assertEquals(test.skipDirectiveTrueResult, result); 131 | }); 132 | }); 133 | 134 | // single directive test - @skip: false 135 | Rhum.testSuite('destructure @skip directive query tests', () => { 136 | Rhum.testCase('destructure @skip directive (false) query', () => { 137 | const result = destructureQueries( 138 | test.skipDirectiveTestData, 139 | test.skipDirectiveFalseValues 140 | ); 141 | 142 | Rhum.asserts.assertEquals(test.skipDirectiveFalseResult, result); 143 | }); 144 | }); 145 | 146 | // TO-DO: queries with multiple directives (not just one @include/@skip) 147 | 148 | Rhum.run(); 149 | -------------------------------------------------------------------------------- /test_files/test_variables/writeCache_variables.ts: -------------------------------------------------------------------------------- 1 | export const test = { 2 | queryStr: ` 3 | query AllMoviesAndGetActorById { 4 | movies { 5 | __typename 6 | id 7 | title 8 | actors { 9 | __typename 10 | id 11 | firstName 12 | } 13 | } 14 | 
actor(id: 1) { 15 | __typename 16 | id 17 | firstName 18 | LastName 19 | } 20 | } 21 | `, 22 | queryStrTwo: ` 23 | query AllMoviesAndGetActorById { 24 | movies { 25 | __typename 26 | id 27 | nickname 28 | actors { 29 | __typename 30 | id 31 | firstName 32 | } 33 | } 34 | actor(id: 1) { 35 | __typename 36 | id 37 | firstName 38 | LastName 39 | } 40 | } 41 | `, 42 | respObj: { 43 | data: { 44 | movies: [ 45 | { 46 | __typename: 'Movie', 47 | id: '1', 48 | title: 'Indiana Jones and the Last Crusade', 49 | actors: [ 50 | { __typename: 'Actor', id: '1', firstName: 'Harrison' }, 51 | { __typename: 'Actor', id: '2', firstName: 'Sean' }, 52 | ], 53 | }, 54 | { 55 | __typename: 'Movie', 56 | id: '2', 57 | title: 'Empire Strikes Back', 58 | actors: [ 59 | { __typename: 'Actor', id: '1', firstName: 'Harrison' }, 60 | { __typename: 'Actor', id: '3', firstName: 'Mark' }, 61 | ], 62 | }, 63 | { 64 | __typename: 'Movie', 65 | id: '3', 66 | title: 'Witness', 67 | actors: [ 68 | { __typename: 'Actor', id: '1', firstName: 'Harrison' }, 69 | { __typename: 'Actor', id: '4', firstName: 'Patti' }, 70 | ], 71 | }, 72 | { 73 | __typename: 'Movie', 74 | id: '4', 75 | title: 'Air Force One', 76 | actors: [ 77 | { __typename: 'Actor', id: '1', firstName: 'Harrison' }, 78 | { __typename: 'Actor', id: '5', firstName: 'Gary' }, 79 | ], 80 | }, 81 | ], 82 | actor: [ 83 | { 84 | __typename: 'Actor', 85 | id: '1', 86 | firstName: 'Harrison', 87 | lastName: 'Ford', 88 | }, 89 | ], 90 | }, 91 | }, 92 | toAddInCache: { 93 | ROOT_QUERY: { 94 | movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'], 95 | 'actor(id:1)': ['Actor~1'], 96 | }, 97 | 'Movie~1': { 98 | id: '1', 99 | title: 'Indiana Jones and the Last Crusade', 100 | actors: ['Actor~1', 'Actor~2'], 101 | }, 102 | 'Actor~1': { id: '1', firstName: 'Harrison', lastName: 'Ford' }, 103 | 'Actor~2': { id: '2', firstName: 'Sean' }, 104 | 'Movie~2': { 105 | id: '2', 106 | title: 'Empire Strikes Back', 107 | actors: ['Actor~1', 'Actor~3'], 108 | }, 109 | 
'Actor~3': { id: '3', firstName: 'Mark' }, 110 | 'Movie~3': { id: '3', title: 'Witness', actors: ['Actor~1', 'Actor~4'] }, 111 | 'Actor~4': { id: '4', firstName: 'Patti' }, 112 | 'Movie~4': { 113 | id: '4', 114 | title: 'Air Force One', 115 | actors: ['Actor~1', 'Actor~5'], 116 | }, 117 | 'Actor~5': { id: '5', firstName: 'Gary' }, 118 | }, 119 | expectedResultCache: { 120 | ROOT_QUERY: { 121 | movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'], 122 | 'actor(id:1)': ['Actor~1'], 123 | 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'], 124 | actors: ['Actor~1', 'Actor~2', 'Actor~3', 'Actor~4', 'Actor~5'], 125 | }, 126 | ROOT_MUTATION: {}, 127 | 'Movie~1': { 128 | id: '1', 129 | title: 'Indiana Jones and the Last Crusade', 130 | actors: ['Actor~1', 'Actor~2'], 131 | genre: 'ACTION', 132 | runtime: '12 minutes', 133 | }, 134 | 'Movie~4': { 135 | id: '4', 136 | title: 'Air Force One', 137 | actors: ['Actor~1', 'Actor~5'], 138 | genre: 'ACTION', 139 | }, 140 | 'Actor~1': { id: '1', firstName: 'Harrison', lastName: 'Ford' }, 141 | 'Actor~2': { 142 | id: '2', 143 | firstName: 'Sean', 144 | lastName: 'Connery', 145 | films: ['Movie~1'], 146 | }, 147 | 'Movie~2': { 148 | id: '2', 149 | title: 'Empire Strikes Back', 150 | actors: ['Actor~1', 'Actor~3'], 151 | }, 152 | 'Actor~3': { id: '3', firstName: 'Mark' }, 153 | 'Movie~3': { id: '3', title: 'Witness', actors: ['Actor~1', 'Actor~4'] }, 154 | 'Actor~4': { id: '4', firstName: 'Patti' }, 155 | 'Actor~5': { id: '5', firstName: 'Gary' }, 156 | }, 157 | originalCache: { 158 | ROOT_QUERY: { 159 | 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'], 160 | actors: ['Actor~1', 'Actor~2', 'Actor~3', 'Actor~4', 'Actor~5'], 161 | }, 162 | ROOT_MUTATION: {}, 163 | 'Movie~1': { 164 | id: '1', 165 | title: 'Indiana Jones and the Last Crusade', 166 | actors: ['Actor~1', 'Actor~2'], 167 | genre: 'ACTION', 168 | runtime: '12 minutes', 169 | }, 170 | 'Movie~2': { 171 | id: '2', 172 | title: 'Empire Strikes Back', 173 | }, 174 | 
'Movie~3': { 175 | id: '3', 176 | title: 'Witness', 177 | }, 178 | 'Movie~4': { 179 | id: '4', 180 | title: 'Air Force One', 181 | actors: ['Actor~1', 'Actor~5'], 182 | genre: 'ACTION', 183 | }, 184 | 185 | 'Actor~2': { 186 | id: '2', 187 | firstName: 'Sean', 188 | lastName: 'Connery', 189 | films: ['Movie~1'], 190 | }, 191 | }, 192 | aliasQuery: ` 193 | query twoHeros { 194 | jediHero: getHero(episode: "jedi") { 195 | __typename 196 | id 197 | name 198 | } 199 | empireHero: getHero(episode: "empire") { 200 | __typename 201 | id 202 | name 203 | } 204 | } 205 | `, 206 | aliasResponse: { 207 | data: { 208 | jediHero: { 209 | __typename: 'Hero', 210 | id: 2, 211 | name: 'R2-D2', 212 | }, 213 | empireHero: { 214 | __typename: 'Hero', 215 | id: 1, 216 | name: 'Luke Skywalker', 217 | }, 218 | }, 219 | }, 220 | }; 221 | -------------------------------------------------------------------------------- /test_files/test_variables/quickCacheLight.js: -------------------------------------------------------------------------------- 1 | // need redis v0.23.2 to be compatible with Rhum v.1.1.11 testing. 
// need redis v0.23.2 to be compatible with Rhum v.1.1.11 testing. That is why we need to separate quickCacheLight from quickCache.js
import { connect } from 'https://deno.land/x/redis@v0.23.2/mod.ts';

// set up a redis sever — one module-level connection shared by every Cache instance
let redis;
const context = 'server';

if (context === 'server') {
  redis = await connect({
    hostname: '127.0.0.1',
    port: 6379,
  });
}

/**
 * Lightweight Redis-backed cache used by the Rhum test suites.
 * Values written through write()/cacheWrite() are double-stringified
 * (write stringifies once, cacheWrite again), so read()/cacheRead()
 * symmetrically parse twice — do not "simplify" one side alone.
 */
export class Cache {
  constructor(
    initialCache = {
      ROOT_QUERY: {},
      ROOT_MUTATION: {},
    }
  ) {
    this.storage = initialCache; // in-memory store, used only when context === "client"
    this.context = "server";
  }

  // set cache configurations
  async configSet(parameter, value) {
    return await redis.configSet(parameter, value);
  }

  // Main functionality methods
  // for reading the inital query; queryStr is the JSON stringified query
  async read(queryStr) {
    console.log('in the read func');
    const returnedValue = await this.cacheRead(queryStr);
    console.log('returnedValue -> ', returnedValue)

    // FIX: was `if (("returnedValue", returnedValue))` — a leftover console.log
    // argument list turned into a comma expression; only `returnedValue` was
    // actually tested, so the behavior is unchanged.
    if (returnedValue) {
      return JSON.parse(returnedValue);
    } else {
      return undefined;
    }
  }

  async write(queryStr, respObj) {
    // update the original cache with same reference
    await this.cacheWrite(queryStr, JSON.stringify(respObj));
  }

  // Will overwrite a list at the given hash by default;
  // pass overwrite = false to append the items instead.
  // FIX: duplicate `cacheWriteList`/`cacheReadList` prototype methods that used
  // to appear later in this class were dead code — these class-field versions
  // shadow prototype methods on every instance — and have been removed.
  cacheWriteList = async (hash, array, overwrite = true) => {
    if (overwrite) {
      await redis.del(hash);
    }
    array = array.map((element) => JSON.stringify(element));
    await redis.rpush(hash, ...array);
  };

  cacheReadList = async (hash) => {
    let cachedArray = await redis.lrange(hash, 0, -1);
    cachedArray = cachedArray.map((element) => JSON.parse(element));

    return cachedArray;
  };

  // stores an object as a Redis hash: every field and value JSON-stringified
  cacheWriteObject = async (hash, obj) => {
    let entries = Object.entries(obj).flat();
    entries = entries.map((entry) => JSON.stringify(entry));

    await redis.hset(hash, ...entries);
  };

  cacheReadObject = async (hash, field) => {
    if (field) {
      let returnValue = await redis.hget(hash, JSON.stringify(field));

      if (returnValue === undefined) return undefined;
      return JSON.parse(returnValue);
    } else {
      // hgetall (redis v0.23.2) returns a flat [field, value, field, value, ...] array
      let objArray = await redis.hgetall(hash);
      if (objArray.length === 0) return undefined;
      let parsedArray = objArray.map((entry) => JSON.parse(entry));

      if (parsedArray.length % 2 !== 0) {
        // odd length means a corrupt field/value pairing
        return undefined;
      }
      let returnObj = {};
      for (let i = 0; i < parsedArray.length; i += 2) {
        returnObj[parsedArray[i]] = parsedArray[i + 1];
      }

      return returnObj;
    }
  };

  // NOTE(review): gql, visit, and print are not imported in this file, so calling
  // createBigHash throws a ReferenceError. Left unchanged (quickCache.js has the
  // imports) — confirm whether this helper is needed here before wiring them up.
  createBigHash(inputfromQuery) {
    let ast = gql(inputfromQuery);

    let returned = visit(ast, { enter: print(ast) });
    let finalReturn = print(returned);
    return JSON.stringify(finalReturn);
  }

  async cacheRead(hash) {
    console.log('in the cacheRead func');
    console.log('context: ', context);
    console.log('hash: ', hash);

    if (this.context === "client") {
      return this.storage[hash];
    } else {
      console.log('In the else block...')
      // lazily seed the root keys so a first read never finds them missing
      if (hash === "ROOT_QUERY" || hash === "ROOT_MUTATION") {
        const hasRootQuery = await redis.get("ROOT_QUERY");

        if (!hasRootQuery) {
          await redis.set("ROOT_QUERY", JSON.stringify({}));
        }
        const hasRootMutation = await redis.get("ROOT_MUTATION");

        if (!hasRootMutation) {
          await redis.set("ROOT_MUTATION", JSON.stringify({}));
        }
      }
      let hashedQuery = await redis.get(hash);
      console.log('Response from redis -> ', hashedQuery)

      if (hashedQuery === undefined) return undefined;
      return JSON.parse(hashedQuery);
    }
  }

  async cacheWrite(hash, value) {
    // writes value to object cache or JSON.stringified value to redis cache
    if (this.context === "client") {
      this.storage[hash] = value;
    } else {
      value = JSON.stringify(value);
      // 6000-second TTL
      // FIX: removed a dead read-back (`let hashedQuery = await redis.get(hash);`)
      // whose result was never used — it cost one round trip per write.
      await redis.setex(hash, 6000, value);
    }
  }

  async cacheDelete(hash) {
    // deletes the hash/value pair on either object cache or redis cache
    if (this.context === "client") {
      delete this.storage[hash];
    } else await redis.del(hash);
  }

  async cacheClear() {
    // erases either object cache or redis cache
    if (this.context === "client") {
      this.storage = { ROOT_QUERY: {}, ROOT_MUTATION: {} };
    } else {
      // NOTE(review): deno-redis flushdb does not take a node-style callback;
      // this function argument is presumably ignored — confirm and drop it when
      // next touching this path.
      await redis.flushdb((err, successful) => {
        if (err) console.log("redis error", err);
        console.log(successful, "clear");
      });
      await redis.set("ROOT_QUERY", JSON.stringify({}));
      await redis.set("ROOT_MUTATION", JSON.stringify({}));
    }
  }

  // functionality to stop polling
  stopPollInterval(interval) {
    clearInterval(interval);
  }
}
33 | id 34 | title 35 | } 36 | } 37 | }`, 38 | 39 | fragmentTestData: { 40 | query: 41 | `query { 42 | movies(input: { genre: ACTION }) { 43 | __typename 44 | id 45 | ...titleAndGenre 46 | } 47 | actors { 48 | id 49 | films { 50 | __typename 51 | id 52 | title 53 | } 54 | ...firstAndLast 55 | } 56 | } 57 | fragment titleAndGenre on Movie { 58 | title 59 | genre 60 | } 61 | fragment firstAndLast on Actors { 62 | firstName 63 | lastName 64 | } 65 | ` 66 | }, 67 | fragmentResultData: 68 | `query { 69 | movies(input: { genre: ACTION }) { 70 | __typename 71 | id 72 | title 73 | genre 74 | } 75 | actors { 76 | id 77 | films { 78 | __typename 79 | id 80 | title 81 | } 82 | firstName 83 | lastName 84 | } 85 | 86 | }`, 87 | 88 | 89 | fragmentTestData2: { 90 | query: 91 | `query { 92 | movies(input: { genre: ACTION }) { 93 | __typename 94 | id 95 | actors { 96 | id 97 | films { 98 | __typename 99 | id 100 | title 101 | } 102 | ...firstAndLast 103 | } 104 | ...titleAndGenre 105 | } 106 | } 107 | fragment titleAndGenre on Movie { 108 | title 109 | genre 110 | } 111 | fragment firstAndLast on Actors { 112 | firstName 113 | lastName 114 | }` 115 | 116 | }, 117 | fragmentResultData2: 118 | `query { 119 | movies(input: { genre: ACTION }) { 120 | __typename 121 | id 122 | actors { 123 | id 124 | films { 125 | __typename 126 | id 127 | title 128 | } 129 | firstName 130 | lastName 131 | } 132 | title 133 | genre 134 | } 135 | }`, 136 | 137 | fragmentTestData3: { 138 | query: 139 | ` 140 | query AllActionMovies { 141 | movies(input: { genre: ACTION }) { 142 | __typename 143 | id 144 | ...titleAndGenre 145 | actors { 146 | id 147 | ...firstAndLast 148 | } 149 | } 150 | } 151 | fragment titleAndGenre on Movie { 152 | title 153 | genre 154 | } 155 | fragment firstAndLast on Actors { 156 | firstName 157 | lastName 158 | }` 159 | }, 160 | 161 | fragmentResultData3: ` 162 | query AllActionMovies { 163 | movies(input: { genre: ACTION }) { 164 | __typename 165 | id 166 | title 167 | genre 168 
| actors { 169 | id 170 | firstName 171 | lastName 172 | } 173 | } 174 | }`, 175 | 176 | singleVariableTestData: 177 | { 178 | variables: { 179 | "movieGenre": "ACTION" 180 | }, 181 | query: 182 | `query AllActionMoviesAndAllActors ($movieGenre: String) { 183 | movies(input: {genre: $movieGenre}) { 184 | __typename 185 | id 186 | title 187 | genre 188 | actors { 189 | id 190 | firstName 191 | lastName 192 | } 193 | 194 | actors { 195 | id 196 | firstName 197 | lastName 198 | films { 199 | __typename 200 | id 201 | title 202 | } 203 | } 204 | } 205 | }` 206 | 207 | }, 208 | singleVariableTestResult: 209 | `query AllActionMoviesAndAllActors { 210 | movies(input: {genre: ACTION}) { 211 | __typename 212 | id 213 | title 214 | genre 215 | actors { 216 | id 217 | firstName 218 | lastName 219 | } 220 | 221 | actors { 222 | id 223 | firstName 224 | lastName 225 | films { 226 | __typename 227 | id 228 | title 229 | } 230 | } 231 | } 232 | }`, 233 | 234 | multiVariableTestData: 235 | { 236 | variables: { 237 | "movieGenre": "ACTION", 238 | "actorID": "7" 239 | }, 240 | query: 241 | `query AllActionMoviesAndAllActors ($movieGenre: String, $actorID: ID) { 242 | movies(genre: $movieGenre) { 243 | __typename 244 | id 245 | title 246 | genre 247 | actors { 248 | id 249 | firstName 250 | lastName 251 | } 252 | 253 | actors (actor: $actorID) { 254 | id 255 | firstName 256 | lastName 257 | films { 258 | __typename 259 | id 260 | title 261 | } 262 | } 263 | } 264 | }`, 265 | }, 266 | 267 | multiVariableTestResult: 268 | ` query AllActionMoviesAndAllActors { 269 | movies(genre: ACTION) { 270 | __typename 271 | id 272 | title 273 | genre 274 | actors { 275 | id 276 | firstName 277 | lastName 278 | } 279 | 280 | actors (actor: 7) { 281 | id 282 | firstName 283 | lastName 284 | films { 285 | __typename 286 | id 287 | title 288 | } 289 | } 290 | } 291 | }`, 292 | } -------------------------------------------------------------------------------- 
/test_files/test_variables/wTinyLFU_variables.js: -------------------------------------------------------------------------------- 1 | // import { FrequencySketch } from '../../src/Browser/FrequencySketch.js' 2 | 3 | /***** 4 | * Overall w-TinyLFU Cache 5 | *****/ 6 | export default function WTinyLFUCache (capacity) { 7 | this.capacity = capacity; 8 | this.sketch = {}; 9 | 10 | // initialize window cache with access to frequency sketch 11 | this.WLRU = new LRUCache(capacity * .01); 12 | this.WLRU.sketch = this.sketch; 13 | // initialize segmented main cache with access to frequency sketch 14 | this.SLRU = new SLRUCache(capacity * .99); 15 | this.SLRU.probationaryLRU.sketch = this.sketch; 16 | this.SLRU.protectedLRU.sketch = this.sketch; 17 | } 18 | 19 | WTinyLFUCache.prototype.putAndPromote = async function (key, value) { 20 | const WLRUCandidate = this.WLRU.put(key, value); 21 | // if adding to the WLRU cache results in an eviction... 22 | if (WLRUCandidate) { 23 | // if the probationary cache is at capacity... 24 | let winner = WLRUCandidate; 25 | if (this.SLRU.probationaryLRU.nodeHash.size >= Math.floor(this.SLRU.probationaryLRU.capacity)) { 26 | // send the last accessed item in the probationary cache to the TinyLFU 27 | const SLRUCandidate = this.SLRU.probationaryLRU.getCandidate(); 28 | // determine which item will improve the hit-ratio most 29 | winner = await this.TinyLFU(WLRUCandidate, SLRUCandidate); 30 | } 31 | // add the winner to the probationary SLRU 32 | this.SLRU.probationaryLRU.put(winner.key, winner.value); 33 | } 34 | } 35 | 36 | WTinyLFUCache.prototype.TinyLFU = function (WLRUCandidate, SLRUCandidate) { 37 | // get the frequency values of both items 38 | const WLRUFreq = this.sketch[WLRUCandidate.key]; 39 | const SLRUFreq = this.sketch[SLRUCandidate.key]; 40 | // return the object with the higher frequency, prioritizing items in the window cache, 41 | return WLRUFreq >= SLRUFreq ? 
WLRUCandidate : SLRUCandidate; 42 | } 43 | 44 | /***** 45 | * Main SLRU Cache 46 | *****/ 47 | function SLRUCache(capacity) { 48 | // Probationary LRU Cache using existing LRU structure in lruBrowserCache.js 49 | this.probationaryLRU = new LRUCache(capacity * .20); 50 | // Protected LRU Cache 51 | this.protectedLRU = new LRUCache(capacity * .80); 52 | } 53 | 54 | // Get item from cache, updates last access, 55 | // and promotes existing items to protected 56 | SLRUCache.prototype.get = function (key) { 57 | // get the item from the protectedLRU 58 | const protectedItem = this.protectedLRU.get(key); 59 | // check to see if the item is in the probationaryLRU 60 | const probationaryItem = this.probationaryLRU.peek(key); 61 | 62 | // If the item is in neither segment, return undefined 63 | if (protectedItem === null && probationaryItem === null) return; 64 | 65 | // If the item only exists in the protected segment, return that item 66 | if (protectedItem !== null) return protectedItem; 67 | 68 | // If the item only exists in the probationary segment, promote to protected and return item 69 | // if adding an item to the protectedLRU results in ejection, demote ejected node 70 | this.probationaryLRU.delete(key); 71 | this.putAndDemote(key, probationaryItem); 72 | return probationaryItem; 73 | } 74 | 75 | // add or update item in cache 76 | SLRUCache.prototype.put = function (key, node) { 77 | // if the item is in the protected segment, update it 78 | if (this.protectedLRU.nodeHash.get(key)) this.putAndDemote(key, node); 79 | else if (this.probationaryLRU.nodeHash(key)) { 80 | // if the item is in the probationary segment, 81 | // promote and update it 82 | this.probationaryLRU.delete(key); 83 | this.putAndDemote(key, node); 84 | } 85 | // if in neither, add item to the probationary segment 86 | else this.probationaryLRU.put(key, node) 87 | } 88 | 89 | // Check to see if the item exists in the cache without updating access 90 | SLRUCache.prototype.has = function (key) { 
91 | return this.protectedLRU.nodeHash.get(key) || this.probationaryLRU.nodeHash.get(key); 92 | } 93 | 94 | // Adds a node to the protectedLRU 95 | SLRUCache.prototype.putAndDemote = function (key, value) { 96 | // if adding an item to the protectedLRU results in ejection, demote ejected node 97 | const demoted = this.protectedLRU.put(key, value); 98 | if (demoted) this.probationaryLRU.put(demoted.key, demoted.value); 99 | } 100 | 101 | class Node { 102 | constructor (key, value) { 103 | this.key = key; 104 | this.value = value; 105 | this.next = this.prev = null; 106 | } 107 | } 108 | 109 | function LRUCache(capacity) { 110 | this.capacity = capacity; 111 | this.currentSize = 0; 112 | // node hash for cache lookup and storage 113 | this.nodeHash = new Map(); 114 | 115 | // doubly-linked list to keep track of recency and handle eviction 116 | this.head = new Node('head', null); 117 | this.tail = new Node('tail', null); 118 | this.head.next = this.tail; 119 | this.tail.prev = this.head; 120 | } 121 | 122 | LRUCache.prototype.removeNode = function (node) { 123 | const prev = node.prev; 124 | const next = node.next; 125 | prev.next = next; 126 | next.prev = prev; 127 | }; 128 | 129 | 130 | LRUCache.prototype.addNode = function (node) { 131 | const tempTail = this.tail.prev; 132 | tempTail.next = node; 133 | 134 | this.tail.prev = node; 135 | node.next = this.tail; 136 | node.prev = tempTail; 137 | } 138 | 139 | // Like get, but doesn't update anything 140 | LRUCache.prototype.peek = function(key) { 141 | const node = this.nodeHash.get(key); 142 | if (!node) return null; 143 | return node.value; 144 | } 145 | 146 | // Like removeNode, but takes key and deletes from hash 147 | LRUCache.prototype.delete = function (key) { 148 | const node = this.nodeHash.get(key); 149 | const prev = node.prev; 150 | const next = node.next; 151 | prev.next = next; 152 | next.prev = prev; 153 | this.nodeHash.delete(key); 154 | } 155 | 156 | LRUCache.prototype.get = function(key) { 157 | 
const node = this.nodeHash.get(key); 158 | 159 | // check if node does not exist in nodeHash obj 160 | if (!node) return null; 161 | // update position to most recent in list 162 | this.removeNode(node); 163 | this.addNode(node); 164 | return node.value; 165 | } 166 | 167 | // used by wTinyLFU to get SLRU eviction candidates for TinyLFU decision 168 | LRUCache.prototype.getCandidate = function () { 169 | const tempHead = this.head.next; 170 | this.removeNode(tempHead); 171 | this.nodeHash.delete(tempHead.key); 172 | return {key: tempHead.key, value: tempHead.value}; 173 | } 174 | 175 | LRUCache.prototype.put = function (key, value) { 176 | // create a new node 177 | const newNode = new Node(key, value); 178 | 179 | // remove node from old position 180 | const node = this.nodeHash.get(key); 181 | if (node) this.removeNode(node); 182 | 183 | // add new node to tail 184 | this.addNode(newNode); 185 | this.nodeHash.set(key, newNode); 186 | 187 | // check capacity - if over capacity, remove and reassign head node 188 | if (this.nodeHash.size > this.capacity){ 189 | const tempHead = this.head.next; 190 | this.removeNode(tempHead); 191 | this.nodeHash.delete(tempHead.key); 192 | // return tempHead for use in w-TinyLFU's SLRU cache 193 | return {key: tempHead.key, value: tempHead.value}; 194 | } 195 | } 196 | 197 | -------------------------------------------------------------------------------- /src/quickCache.js: -------------------------------------------------------------------------------- 1 | /** @format */ 2 | 3 | import "https://deno.land/x/dotenv@v3.2.2/load.ts"; 4 | import { connect } from "https://deno.land/x/redis@v0.29.2/mod.ts"; 5 | import { gql } from "https://deno.land/x/oak_graphql@0.6.4/mod.ts"; 6 | import { print, visit } from "https://deno.land/x/graphql_deno@v15.0.0/mod.ts"; 7 | import { destructureQueries } from './Browser/destructure.js'; 8 | 9 | 10 | export class Cache { 11 | constructor( 12 | initialCache = { 13 | ROOT_QUERY: {}, 14 | ROOT_MUTATION: 
{}, 15 | } 16 | ) { 17 | this.ROOT_QUERY = initialCache.ROOT_QUERY; 18 | this.ROOT_MUTATION = initialCache.ROOT_MUTATION; 19 | } 20 | 21 | // METHOD TO CONNECT TO CACHE 22 | async connect(port, policy, maxmemory) { 23 | this.redis = await connect({ 24 | hostname: Deno.env.get('REDIS_HOST'), 25 | port: port, 26 | }); 27 | console.log('connecting to redis'); 28 | this.cacheClear(); 29 | this.redis.configSet('maxmemory-policy', policy); 30 | this.redis.configSet('maxmemory', maxmemory); 31 | } 32 | 33 | // METHOD TO READ FROM REDIS CACHE & RESTRUCTURE THE DATA 34 | async read(queryStr) { 35 | // destructure the query string into an object 36 | const queries = destructureQueries(queryStr).queries; 37 | if (!queries) return; 38 | const responseObject = {}; 39 | // iterate through each query in the input object 40 | for (const query in queries) { 41 | const queryHash = queries[query].name.concat(queries[query].arguments); 42 | if (this.ROOT_QUERY[queryHash]) { 43 | const hashArray = this.ROOT_QUERY[queryHash]; 44 | const respObjProp = queries[query].alias ?? 
queries[query].name; 45 | // invoke populateAllHashes to add data object to the response object 46 | responseObject[respObjProp] = await this.populateAllHashes(hashArray, queries[query].fields); 47 | if (!responseObject[respObjProp]) return; 48 | } else { 49 | return null; 50 | } 51 | } 52 | return { data: responseObject }; 53 | } 54 | 55 | populateAllHashes(allHashes, fields){ 56 | if (!allHashes.length) return []; 57 | const tildeInd = allHashes[0].indexOf('~'); 58 | const typeName = allHashes[0].slice(0, tildeInd); 59 | const reduction = allHashes.reduce(async (acc, hash) => { 60 | const readStr = await this.redis.get(hash); 61 | const readVal = await JSON.parse(readStr); 62 | if (!readVal) return; 63 | const dataObj = {}; 64 | // iterate over the fields object to populate with data from cache 65 | for (const field in fields) { 66 | if (typeof fields[field] !== 'object') { 67 | if (field === '__typename') { 68 | dataObj[field] = typeName; 69 | } else { 70 | dataObj[field] = readVal[field] || 'n/a'; 71 | } 72 | } else { 73 | // if the field from the input query is an array of hashes, recursively invoke 74 | dataObj[field] = await this.populateAllHashes(readVal[field], fields[field]); 75 | if (dataObj[field] === undefined) return; 76 | } 77 | } 78 | // at this point acc should be an array of response objects for each hash 79 | const resolvedProm = await Promise.resolve(acc); 80 | resolvedProm.push(dataObj); 81 | return resolvedProm; 82 | }, []); 83 | return reduction; 84 | }; 85 | 86 | // METHOD TO WRITE TO REDIS CACHE 87 | async write(queryStr, respObj, searchTerms, deleteFlag) { 88 | const hash = this.createQueryKey(queryStr); 89 | const array = Object.keys(respObj); 90 | // isolate type of of query - 'person,' 'book,' etc. 
91 | const tildeInd = array[0].indexOf('~'); 92 | const typeName = array[0].slice(0, tildeInd); 93 | // store the array of keys to ROOT_QUERY 94 | this.ROOT_QUERY[hash] = array; 95 | // write each item in the array to the cache 96 | for (let i = 0; i < array.length; i++) { 97 | await this.redis.set(array[i], JSON.stringify(respObj[array[i]])); 98 | // if using searchTerms, iterate throuogh those and also store each item 99 | // according to those terms in ROOT_QUERY 100 | if (searchTerms.length && queryStr.slice(8 , 11) === 'all') { 101 | searchTerms.forEach(el => { 102 | const elVal = respObj[array[i]][el].replaceAll(' ', ''); 103 | const hashKey = `one${typeName}(${el}:"${elVal}")` 104 | if (!this.ROOT_QUERY[hashKey]) this.ROOT_QUERY[hashKey] = []; 105 | this.ROOT_QUERY[hashKey].push(array[i]); 106 | }) 107 | } 108 | } 109 | } 110 | 111 | 112 | // CURRENTLY BEING UTILIZED BY invalidateCacheCheck.ts, WHICH IS A FILE THAT SHOULD BE REFACTORED IN FUTURE ITERATION 113 | cacheWriteObject = async (hash, obj) => { 114 | let entries = Object.entries(obj).flat(); 115 | entries = entries.map((entry) => JSON.stringify(entry)); 116 | // adding as nested strings? take out one layer for clarity. 
117 | await this.redis.hset(hash, ...entries); 118 | }; 119 | 120 | // CURRENTLY BEING UTILIZED BY invalidateCacheCheck.ts, WHICH IS A FILE THAT SHOULD BE REFACTORED IN FUTURE ITERATION 121 | cacheReadObject = async (hash, fields = []) => { 122 | // Checks for the fields requested, then queries cache for those specific keys in the hashes 123 | if (fields.length !== 0) { 124 | const fieldObj = {}; 125 | for (const field of fields) { 126 | const rawCacheValue = await this.redisdb.hget(hash, JSON.stringify(field)); 127 | fieldObj[field] = JSON.parse(rawCacheValue); 128 | } 129 | return fieldObj; 130 | } else { 131 | let objArray = await this.redisdb.hgetall(hash); 132 | if (objArray.length == 0) return undefined; 133 | let parsedArray = objArray.map((entry) => JSON.parse(entry)); 134 | 135 | if (parsedArray.length % 2 !== 0) { 136 | return undefined; 137 | } 138 | let returnObj = {}; 139 | for (let i = 0; i < parsedArray.length; i += 2) { 140 | returnObj[parsedArray[i]] = parsedArray[i + 1]; 141 | } 142 | 143 | return returnObj; 144 | } 145 | }; 146 | 147 | /* 148 | Creates a string to search the cache or add as a key in the cache. 
149 | */ 150 | createQueryKey(queryStr) { 151 | // traverses AST and gets object name, and any filter keys in the query 152 | const ast = gql(queryStr); 153 | const tableName = ast.definitions[0].selectionSet.selections[0].name.value; 154 | let queryKey = `${tableName}`; 155 | 156 | if (ast.definitions[0].operation === 'mutation') return queryKey; 157 | if (ast.definitions[0].selectionSet.selections[0].arguments.length) { 158 | const fieldsArray = 159 | ast.definitions[0].selectionSet.selections[0].arguments; 160 | const resultsObj = {}; 161 | fieldsArray.forEach((el) => { 162 | const name = el.name.value; 163 | const value = el.value.value; 164 | resultsObj[name] = value; 165 | }); 166 | 167 | let parens = '' // name:"Yoda" 168 | for (const key in resultsObj) { 169 | parens += `${key}:"${resultsObj[key]}"`; 170 | } 171 | queryKey = queryKey + '(' + parens + ')'; 172 | } 173 | return queryKey; 174 | } 175 | 176 | async cacheDelete(hash) { 177 | await this.redis.del(hash); 178 | } 179 | 180 | async cacheClear() { 181 | await this.redis.flushdb((err, successful) => { 182 | if (err) console.log('redis error', err); 183 | console.log(successful, 'clear'); 184 | }); 185 | } 186 | 187 | // functionality to stop polling 188 | stopPollInterval(interval) { 189 | clearInterval(interval); 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /src/Obsidian.ts: -------------------------------------------------------------------------------- 1 | import { graphql } from 'https://cdn.pika.dev/graphql@15.0.0'; 2 | import { renderPlaygroundPage } from 'https://deno.land/x/oak_graphql@0.6.2/graphql-playground-html/render-playground-html.ts'; 3 | import { makeExecutableSchema } from 'https://deno.land/x/oak_graphql@0.6.2/graphql-tools/schema/makeExecutableSchema.ts'; 4 | import { Cache } from './quickCache.js'; 5 | import queryDepthLimiter from './DoSSecurity.ts'; 6 | import { restructure } from './restructure.ts'; 7 | import { 
normalizeObject } from './normalize.ts'; 8 | import { isMutation, invalidateCache } from './invalidateCacheCheck.ts'; 9 | import { mapSelectionSet } from './mapSelections.js'; 10 | import { HashTable } from './queryHash.js'; 11 | 12 | interface Constructable { 13 | new (...args: any): T & OakRouter; 14 | } 15 | 16 | interface OakRouter { 17 | post: any; 18 | get: any; 19 | obsidianSchema?: any; 20 | } 21 | 22 | export interface ObsidianRouterOptions { 23 | Router: Constructable; 24 | path?: string; 25 | typeDefs: any; 26 | resolvers: ResolversProps; 27 | context?: (ctx: any) => any; 28 | usePlayground?: boolean; 29 | useCache?: boolean; 30 | redisPort?: number; 31 | policy?: string; 32 | maxmemory?: string; 33 | searchTerms?: string[]; 34 | persistQueries?: boolean; 35 | hashTableSize?: number; 36 | maxQueryDepth?: number; 37 | customIdentifier?: string[]; 38 | mutationTableMap?: Record; // Deno recommended type name 39 | } 40 | 41 | export interface ResolversProps { 42 | Query?: any; 43 | Mutation?: any; 44 | [dynamicProperty: string]: any; 45 | } 46 | 47 | // Export developer chosen port for redis database connection // 48 | export let redisPortExport: number = 6379; 49 | 50 | // tentative fix to get invalidateCacheCheck.ts access to the cache; 51 | export const scope: Record = {}; 52 | 53 | /** 54 | * 55 | * @param param0 56 | * @returns 57 | */ 58 | export async function ObsidianRouter({ 59 | Router, 60 | path = '/graphql', 61 | typeDefs, 62 | resolvers, 63 | context, 64 | usePlayground = false, 65 | useCache = true, // default to true 66 | redisPort = 6379, 67 | policy = 'allkeys-lru', 68 | maxmemory = '2000mb', 69 | searchTerms = [], // Developer can pass in array of search categories 70 | persistQueries = false, // default to false 71 | hashTableSize = 16, // default to 16 72 | maxQueryDepth = 0, 73 | customIdentifier = ['__typename', '_id'], 74 | mutationTableMap = {}, // Developer passes in object where keys are add mutations and values are arrays of 
affected tables 75 | }: ObsidianRouterOptions): Promise { 76 | const router = new Router(); 77 | const schema = makeExecutableSchema({ typeDefs, resolvers }); 78 | 79 | let cache, hashTable; 80 | if (useCache) { 81 | cache = new Cache(); 82 | scope.cache = cache; 83 | cache.connect(redisPort, policy, maxmemory); 84 | } 85 | if (persistQueries) { 86 | hashTable = new HashTable(hashTableSize); 87 | } 88 | 89 | //post 90 | await router.post(path, async (ctx: any) => { 91 | 92 | const { response, request } = ctx; 93 | if (!request.hasBody) return; 94 | 95 | try { 96 | let queryStr; 97 | let body = await request.body().value; 98 | if (persistQueries && body.hash && !body.query) { 99 | const { hash } = body; 100 | queryStr = hashTable.get(hash); 101 | // if not found in hash table, respond so we can send full query. 102 | if (!queryStr) { 103 | response.status = 204; 104 | return; 105 | } 106 | } else if (persistQueries && body.hash && body.query) { 107 | const { hash, query } = body; 108 | hashTable.add(hash, query); 109 | queryStr = query; 110 | } else if (persistQueries && !body.hash) { 111 | throw new Error('Unable to process request because hashed query was not provided'); 112 | } else if (!persistQueries) { 113 | queryStr = body.query; 114 | } else { 115 | throw new Error('Unable to process request because query argument not provided'); 116 | } 117 | 118 | const contextResult = context ? 
await context(ctx) : undefined; 119 | // const selectedFields = mapSelectionSet(queryStr); // Gets requested fields from query and saves into an array 120 | if (maxQueryDepth) queryDepthLimiter(queryStr, maxQueryDepth); // If a securty limit is set for maxQueryDepth, invoke queryDepthLimiter, which throws error if query depth exceeds maximum 121 | let restructuredBody = { query: restructure({query: queryStr}) }; // Restructure gets rid of variables and fragments from the query 122 | 123 | // IF WE ARE USING A CACHE 124 | if (useCache) { 125 | 126 | let cacheQueryValue = await cache.read(queryStr); // Parses query string into query key and checks cache for that key 127 | 128 | // ON CACHE MISS 129 | if (!cacheQueryValue) { 130 | // QUERY THE DATABASE 131 | const gqlResponse = await (graphql as any)( 132 | schema, 133 | queryStr, 134 | resolvers, 135 | contextResult, 136 | body.variables || undefined, 137 | body.operationName || undefined 138 | ); 139 | 140 | // customIdentifier is a default param for Obsidian Router - defaults to ['__typename', '_id] 141 | const normalizedGQLResponse = normalizeObject( // Recursively flattens an arbitrarily nested object into an objects with hash key and hashable object pairs 142 | gqlResponse, 143 | customIdentifier 144 | ); 145 | 146 | // If operation is mutation, invalidate relevant responses in cache 147 | if (isMutation(restructuredBody)) { 148 | invalidateCache(normalizedGQLResponse, queryStr, mutationTableMap); 149 | // ELSE, simply write to the cache 150 | } else { 151 | await cache.write(queryStr, normalizedGQLResponse, searchTerms); 152 | } 153 | // AFTER HANDLING THE CACHE, RETURN THE ORIGINAL RESPONSE 154 | response.status = 200; 155 | response.body = gqlResponse; 156 | return; 157 | // ON CACHE HIT 158 | } else { 159 | response.status = 200; 160 | response.body = cacheQueryValue; // Returns response from cache 161 | return; 162 | } 163 | // IF NOT USING A CACHE 164 | } else { 165 | // DIRECTLY QUERY THE DATABASE 166 | 
/**
 * Entry in the LRU recency list. Holds one cached key/value pair plus the
 * prev/next links used by the doubly-linked list below.
 */
class Node {
  constructor(key, value) {
    this.key = key;
    this.value = value;
    this.next = this.prev = null;
  }
}

/**
 * Browser-side LRU cache for normalized GraphQL responses.
 *
 * Entries live in `nodeHash` (key -> Node) for O(1) lookup, and in a
 * doubly-linked list ordered by recency: the least recently used entry sits
 * next to `head`, the most recently used next to `tail`.
 *
 * @param {number} capacity - maximum number of entries before eviction
 */
export default function LRUCache(capacity) {
  this.capacity = capacity;
  // NOTE(review): currentSize is never updated by get/put below;
  // nodeHash.size is the authoritative entry count.
  this.currentSize = 0;
  // roots map query/mutation hash strings to arrays of entity hashes
  this.ROOT_QUERY = {};
  this.ROOT_MUTATION = {};

  // node hash for cache lookup and storage
  this.nodeHash = new Map();

  // sentinel nodes for the doubly-linked recency list
  this.head = new Node('head', null);
  this.tail = new Node('tail', null);
  this.head.next = this.tail;
  this.tail.prev = this.head;
}

// Unlink a node from the recency list (nodeHash is untouched).
LRUCache.prototype.removeNode = function (node) {
  const prev = node.prev;
  const next = node.next;
  prev.next = next;
  next.prev = prev;
};

// Insert a node just before the tail sentinel (most-recently-used slot).
LRUCache.prototype.addNode = function (node) {
  const tempTail = this.tail.prev;
  tempTail.next = node;

  this.tail.prev = node;
  node.next = this.tail;
  node.prev = tempTail;
};

/**
 * Look up a key and refresh its recency.
 * @param {string} key
 * @returns {*} the cached value, or null on a miss
 */
LRUCache.prototype.get = function (key) {
  const node = this.nodeHash.get(key);

  // check if node does not exist in nodeHash
  if (!node) return null;

  // move the node to the most-recently-used position
  this.removeNode(node);
  this.addNode(node);
  return node.value;
};

/**
 * Insert or update a key, evicting the least recently used entry when the
 * cache grows past capacity.
 * @param {string} key
 * @param {*} value
 */
LRUCache.prototype.put = function (key, value) {
  // remove node from old position
  const node = this.nodeHash.get(key);
  if (node) this.removeNode(node);

  // create new node and add to tail
  const newNode = new Node(key, value);
  this.addNode(newNode);
  this.nodeHash.set(key, newNode);

  // BUG FIX: the original compared `this.nodeHash.get(key).size` against
  // capacity — a Node has no `size` property, so the check was always
  // `undefined > capacity` (false) and eviction never ran. Compare the
  // entry count of the Map itself instead.
  if (this.nodeHash.size > this.capacity) {
    const tempHead = this.head.next; // least recently used entry
    this.removeNode(tempHead);
    this.nodeHash.delete(tempHead.key);
  }
};

/**
 * Read a query from the cache and build a GraphQL-shaped response object.
 * @param {string} queryStr - raw GraphQL query string
 * @returns {Promise<object|null|undefined>} `{ data }` on a full hit, null
 *   when a query is not present in ROOT_QUERY, undefined on partial misses
 *   or when queryStr is a mutation
 * @throws {TypeError} when queryStr is not a string
 */
LRUCache.prototype.read = async function (queryStr) {
  if (typeof queryStr !== "string") throw new TypeError("input should be a string");
  // destructure the query string into an object
  const queries = destructureQueries(queryStr).queries;
  // breaks out of function if queryStr is a mutation
  if (!queries) return undefined;
  const responseObject = {};
  // iterate through each query in the input queries object
  for (const query in queries) {
    // reconstruct the ROOT_QUERY key from the query name and arguments
    const queryHash = queries[query].name.concat(queries[query].arguments);
    const rootQuery = this.ROOT_QUERY;
    if (rootQuery[queryHash]) {
      // the entity hashes previously stored for this exact query
      const arrayHashes = rootQuery[queryHash];
      // response property label - use alias if applicable, otherwise name
      const respObjProp = queries[query].alias ?? queries[query].name;
      // populate data objects from the cache for each hash
      responseObject[respObjProp] = await this.populateAllHashes(
        arrayHashes,
        queries[query].fields
      );

      // a partial miss anywhere invalidates the whole cached read
      if (!responseObject[respObjProp]) return undefined;
    } else {
      // no match with ROOT_QUERY
      return null;
    }
  }
  return { data: responseObject };
};

/**
 * Normalize a GraphQL response and merge it into the cache.
 * @param {string} queryStr - the query/mutation that produced respObj
 * @param {object} respObj - GraphQL response (`{ data: ... }`)
 * @param {string[]} [searchTerms] - fields to pre-build one-item lookups for
 * @param {boolean} [deleteFlag] - forwarded to normalizeResult
 */
LRUCache.prototype.write = async function (queryStr, respObj, searchTerms, deleteFlag) {
  let nullFlag = false;
  let deleteMutation = "";
  // detect null payloads and delete-mutations before touching the cache
  for (const query in respObj.data) {
    if (respObj.data[query] === null) nullFlag = true;
    else if (query.toLowerCase().includes('delete')) deleteMutation = labelId(respObj.data[query]);
  }
  if (!nullFlag) {
    const queryObj = destructureQueries(queryStr);
    const resFromNormalize = normalizeResult(queryObj, respObj, deleteFlag);
    // update the original cache with same reference
    for (const hash in resFromNormalize) {
      const resp = await this.get(hash);
      if (hash === "ROOT_QUERY" || hash === "ROOT_MUTATION") {
        if (deleteMutation === "") {
          // merge new root entries into the existing root object
          this[hash] = Object.assign(this[hash], resFromNormalize[hash]);
        } else {
          // a delete mutation: remove the deleted hash from every matching
          // collection-style ROOT_QUERY entry
          const typeName = deleteMutation.slice(0, deleteMutation.indexOf('~'));
          for (const key in this.ROOT_QUERY) {
            if (key.includes(typeName + 's') || key.includes(plural(typeName))) {
              for (let i = 0; i < this.ROOT_QUERY[key].length; i++) {
                if (this.ROOT_QUERY[key][i] === deleteMutation) {
                  this.ROOT_QUERY[key].splice(i, 1);
                  i--; // stay on the same index after the splice
                }
              }
            }
          }
        }
      } else if (resFromNormalize[hash] === "DELETED") {
        // Should we delete directly or do we still need to flag as DELETED
        await this.put(hash, "DELETED");
      } else if (resp) {
        // entity already cached: merge new fields into the existing record
        const newObj = Object.assign(resp, resFromNormalize[hash]);
        await this.put(hash, newObj);
      } else {
        // brand-new entity: cache it and register it on matching collections
        const typeName = hash.slice(0, hash.indexOf('~'));
        await this.put(hash, resFromNormalize[hash]);
        for (const key in this.ROOT_QUERY) {
          if (key.includes(typeName + 's') || key.includes(plural(typeName))) {
            this.ROOT_QUERY[key].push(hash);
          }
        }
        /****
         * if search terms were provided in the wrapper and the query is an
         * "all"-type query, build out queries in ROOT_QUERY that match the
         * search terms for each item retrieved from the "all"-type query so
         * that future single queries can be looked up directly from the cache
         ****/
        // NOTE(review): slice(8, 11) assumes the query string has a fixed
        // prefix before the "all" token — confirm against insertTypenames.
        if (searchTerms && queryStr.slice(8, 11) === 'all') {
          searchTerms.forEach(el => {
            const elVal = resFromNormalize[hash][el].replaceAll(' ', '');
            const hashKey = `one${typeName}(${el}:"${elVal}")`;
            if (!this.ROOT_QUERY[hashKey]) this.ROOT_QUERY[hashKey] = [];
            this.ROOT_QUERY[hashKey].push(hash);
          });
        }
      }
    }
  }
};

// Build the "Type~id" cache hash for an entity, tolerating common id spellings.
function labelId(obj) {
  const id = obj.id || obj.ID || obj._id || obj._ID || obj.Id || obj._Id;
  return obj.__typename + "~" + id;
}

/**
 * Fill in a response object with cached values for every hash in
 * allHashesFromQuery, recursing into nested hash arrays.
 * @param {string[]} allHashesFromQuery - e.g. ['Actor~1', 'Actor~2']
 * @param {object} fields - destructured field map for this level
 * @returns {Promise<object[]>|[]|undefined} array of data objects, [] for an
 *   empty input, undefined when any required field is missing from cache
 */
LRUCache.prototype.populateAllHashes = function (
  allHashesFromQuery,
  fields
) {
  if (!allHashesFromQuery.length) return [];
  // the type name is the part of the hash before the '~' separator
  const hyphenIdx = allHashesFromQuery[0].indexOf("~");
  const typeName = allHashesFromQuery[0].slice(0, hyphenIdx);
  const reduction = allHashesFromQuery.reduce(async (acc, hash) => {
    // for each hash from the input query, build the response object
    const readVal = await this.get(hash);
    if (readVal === "DELETED") return acc; // skip tombstoned entries
    if (!readVal) return undefined;        // cache miss aborts the read
    const dataObj = {};
    for (const field in fields) {
      if (readVal[field] === "DELETED") continue;
      // a missing scalar (other than __typename) means a partial hit
      if (readVal[field] === undefined && field !== "__typename") {
        return undefined;
      }
      if (typeof fields[field] !== "object") {
        // scalar leaf; __typename is reconstructed from the hash prefix
        if (field === "__typename") {
          dataObj[field] = typeName;
        } else dataObj[field] = readVal[field];
      } else {
        // nested array of hashes: recursively populate
        dataObj[field] = await this.populateAllHashes(
          readVal[field],
          fields[field]
        );
        if (dataObj[field] === undefined) return undefined;
      }
    }
    // acc is a promise resolving to the array of built data objects
    const resolvedProm = await Promise.resolve(acc);
    resolvedProm.push(dataObj);
    return resolvedProm;
  }, []);
  return reduction;
};
70 | __typename: 'Actor', 71 | id: '5', 72 | firstName: 'Gary', 73 | lastName: 'Oldman', 74 | }, 75 | ], 76 | }, 77 | ], 78 | actors: [ 79 | { 80 | __typename: 'Actor', 81 | id: '1', 82 | firstName: 'Harrison', 83 | lastName: 'Ford', 84 | films: [ 85 | { 86 | __typename: 'Movie', 87 | id: '1', 88 | title: 'Indiana Jones and the Last Crusade', 89 | }, 90 | { 91 | __typename: 'Movie', 92 | id: '2', 93 | title: 'Empire Strikes Back', 94 | }, 95 | { 96 | __typename: 'Movie', 97 | id: '3', 98 | title: 'Witness', 99 | }, 100 | { 101 | __typename: 'Movie', 102 | id: '4', 103 | title: 'Air Force One', 104 | }, 105 | ], 106 | }, 107 | { 108 | __typename: 'Actor', 109 | id: '2', 110 | firstName: 'Sean', 111 | lastName: 'Connery', 112 | films: [ 113 | { 114 | __typename: 'Movie', 115 | id: '1', 116 | title: 'Indiana Jones and the Last Crusade', 117 | }, 118 | ], 119 | }, 120 | { 121 | __typename: 'Actor', 122 | id: '3', 123 | firstName: 'Mark', 124 | lastName: 'Hamill', 125 | films: [ 126 | { 127 | __typename: 'Movie', 128 | id: '2', 129 | title: 'Empire Strikes Back', 130 | }, 131 | ], 132 | }, 133 | { 134 | __typename: 'Actor', 135 | id: '4', 136 | firstName: 'Patti', 137 | lastName: 'LuPone', 138 | films: [ 139 | { 140 | __typename: 'Movie', 141 | id: '3', 142 | title: 'Witness', 143 | }, 144 | ], 145 | }, 146 | { 147 | __typename: 'Actor', 148 | id: '5', 149 | firstName: 'Gary', 150 | lastName: 'Oldman', 151 | films: [ 152 | { 153 | __typename: 'Movie', 154 | id: '4', 155 | title: 'Air Force One', 156 | }, 157 | ], 158 | }, 159 | ], 160 | }, 161 | }, 162 | resultObj1: { 163 | ROOT_QUERY: { 164 | 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'], //"hero(episode:EMPIRE)": ["Hero~1"] OR /"hero": ["Hero~1", "Hero~2"] 165 | actors: ['Actor~1', 'Actor~2', 'Actor~3', 'Actor~4', 'Actor~5'], //"hero(episode:JEDI)": ["Hero~2"] 166 | }, 167 | 'Movie~1': { 168 | id: '1', 169 | title: 'Indiana Jones and the Last Crusade', 170 | actors: ['Actor~1', 'Actor~2'], 171 | genre: 
'ACTION', 172 | }, 173 | 'Movie~2': { 174 | id: '2', 175 | title: 'Empire Strikes Back', 176 | }, 177 | 'Movie~3': { 178 | id: '3', 179 | title: 'Witness', 180 | }, 181 | 'Movie~4': { 182 | id: '4', 183 | title: 'Air Force One', 184 | actors: ['Actor~1', 'Actor~5'], 185 | genre: 'ACTION', 186 | }, 187 | 'Actor~1': { 188 | id: '1', 189 | firstName: 'Harrison', 190 | lastName: 'Ford', 191 | films: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'], 192 | }, 193 | 'Actor~2': { 194 | id: '2', 195 | firstName: 'Sean', 196 | lastName: 'Connery', 197 | films: ['Movie~1'], 198 | }, 199 | 'Actor~3': { 200 | id: '3', 201 | firstName: 'Mark', 202 | lastName: 'Hamill', 203 | films: ['Movie~2'], 204 | }, 205 | 'Actor~4': { 206 | id: '4', 207 | firstName: 'Patti', 208 | lastName: 'LuPone', 209 | films: ['Movie~3'], 210 | }, 211 | 'Actor~5': { 212 | id: '5', 213 | firstName: 'Gary', 214 | lastName: 'Oldman', 215 | films: ['Movie~4'], 216 | }, 217 | }, 218 | hashInput1: { 219 | __typename: 'Movie', 220 | id: '1', 221 | title: 'Indiana Jones and the Last Crusade', 222 | genre: 'ACTION', 223 | actors: [ 224 | { 225 | __typename: 'Actor', 226 | id: '1', 227 | firstName: 'Harrison', 228 | lastName: 'Ford', 229 | friends: [ 230 | { 231 | __typename: 'Actor', 232 | id: '6', 233 | firstName: 'Fred', 234 | favHobby: 'sleeping', 235 | }, 236 | { 237 | __typename: 'Actor', 238 | id: '7', 239 | firstName: 'Gary', 240 | favHobby: 'climbing', 241 | }, 242 | { 243 | __typename: 'Actor', 244 | id: '2', 245 | firstName: 'Sean', 246 | favHobby: 'fishing', 247 | }, 248 | ], 249 | }, 250 | { 251 | __typename: 'Actor', 252 | id: '2', 253 | firstName: 'Sean', 254 | lastName: 'Connery', 255 | friends: [], 256 | }, 257 | ], 258 | }, 259 | queryInput1: { 260 | queries: { 261 | __typename: 'meta', 262 | name: 'movies', 263 | arguments: '(input:{genre:ACTION})', 264 | fields: { 265 | __typename: 'meta', 266 | id: 'scalar', 267 | title: 'scalar', 268 | genre: 'scalar', 269 | actors: { 270 | id: 'scalar', 271 | 
firstName: 'scalar', 272 | lastName: 'scalar', 273 | friends: { id: 'scalar', firstName: 'scalar', favHobby: 'scalar' }, 274 | }, 275 | }, 276 | }, 277 | }, 278 | resultInput1: { 279 | 'Movie~1': { 280 | id: '1', 281 | title: 'Indiana Jones and the Last Crusade', 282 | genre: 'ACTION', 283 | actors: ['Actor~1', 'Actor~2'], 284 | }, 285 | 'Actor~1': { 286 | id: '1', 287 | firstName: 'Harrison', 288 | lastName: 'Ford', 289 | friends: ['Actor~6', 'Actor~7', 'Actor~2'], 290 | }, 291 | 'Actor~2': { 292 | id: '2', 293 | firstName: 'Sean', 294 | lastName: 'Connery', 295 | favHobby: 'fishing', 296 | friends: [], 297 | }, 298 | 'Actor~6': { 299 | id: '6', 300 | firstName: 'Fred', 301 | favHobby: 'sleeping', 302 | }, 303 | 'Actor~7': { 304 | id: '7', 305 | firstName: 'Gary', 306 | favHobby: 'climbing', 307 | }, 308 | }, 309 | aliasTestQueryObj: { 310 | queries: [ 311 | { 312 | __typename: 'meta', 313 | name: 'hero', 314 | alias: 'empireHero', 315 | arguments: '(episode:EMPIRE)', 316 | fields: { 317 | __typename: 'meta', 318 | name: 'scalar', 319 | id: 'scalar', 320 | }, 321 | }, 322 | { 323 | __typename: 'meta', 324 | name: 'hero', 325 | alias: 'jediHero', 326 | arguments: '(episode:JEDI)', 327 | fields: { 328 | __typename: 'meta', 329 | name: 'scalar', 330 | id: 'scalar', 331 | }, 332 | }, 333 | ], 334 | }, 335 | aliasTestResult: { 336 | data: { 337 | empireHero: { 338 | __typename: 'Hero', 339 | id: '1', 340 | name: 'Luke Skywalker', 341 | }, 342 | jediHero: { 343 | __typename: 'Hero', 344 | id: '2', 345 | name: 'R2-D2', 346 | }, 347 | }, 348 | }, 349 | aliasTestRootHash: { 350 | 'Hero~1': { 351 | id: '1', 352 | name: 'Luke Skywalker', 353 | }, 354 | 'Hero~2': { 355 | id: '2', 356 | name: 'R2-D2', 357 | }, 358 | ROOT_QUERY: { 359 | 'hero(episode:EMPIRE)': ['Hero~1'], 360 | 'hero(episode:JEDI)': ['Hero~2'], 361 | }, 362 | }, 363 | }; 364 | -------------------------------------------------------------------------------- /src/Browser/wTinyLFUBrowserCache.js: 
import { plural } from "https://deno.land/x/deno_plural@2.0.0/mod.ts";

import normalizeResult from "./normalize.js";
import destructureQueries from "./destructure.js";
import SLRUCache from "./wTinyLFU%20Sub-Caches/slruSub-cache.js"
import LRUCache from "./wTinyLFU%20Sub-Caches/lruSub-cache.js";
import { FrequencySketch } from './FrequencySketch.js';

/*****
 * Overall w-TinyLFU Cache
 *
 * Composite cache: a small "window" LRU (1% of capacity) absorbs new
 * entries; a segmented LRU (99% of capacity, probationary + protected
 * segments) holds the main population. A shared FrequencySketch counts
 * accesses and arbitrates admission on window evictions (see TinyLFU).
 *****/
export default function WTinyLFUCache (capacity) {
  this.capacity = capacity;
  // roots map query/mutation hash strings to arrays of entity hashes
  this.ROOT_QUERY = {};
  this.ROOT_MUTATION = {};
  this.sketch = new FrequencySketch();

  // initialize window cache with access to frequency sketch
  this.WLRU = new LRUCache(capacity * .01);
  this.WLRU.sketch = this.sketch;
  // initialize segmented main cache with access to frequency sketch
  this.SLRU = new SLRUCache(capacity * .99);
  this.SLRU.probationaryLRU.sketch = this.sketch;
  this.SLRU.protectedLRU.sketch = this.sketch;
}

/**
 * Insert into the window LRU; when that overflows, run the TinyLFU
 * admission contest between the window's evictee and the probationary
 * segment's eviction candidate, and keep the winner in probationary.
 * @param {string} key - entity hash, e.g. 'Actor~1'
 * @param {*} value - normalized record to cache
 */
WTinyLFUCache.prototype.putAndPromote = async function (key, value) {
  const WLRUCandidate = this.WLRU.put(key, value);
  // if adding to the WLRU cache results in an eviction...
  if (WLRUCandidate) {
    // if the probationary cache is at capacity...
    let winner = WLRUCandidate;
    if (this.SLRU.probationaryLRU.nodeHash.size >= Math.floor(this.SLRU.probationaryLRU.capacity)) {
      // send the last accessed item in the probationary cache to the TinyLFU
      const SLRUCandidate = this.SLRU.probationaryLRU.getCandidate();
      // determine which item will improve the hit-ratio most
      winner = await this.TinyLFU(WLRUCandidate, SLRUCandidate);
    }
    // add the winner to the probationary SLRU
    this.SLRU.probationaryLRU.put(winner.key, winner.value);
  }
}

// fills in placeholder data in response object with values found in cache;
// checks the SLRU first, then the window LRU, and bumps the frequency
// sketch on every hit. Returns [] for empty input, undefined on any miss.
WTinyLFUCache.prototype.populateAllHashes = function (
  allHashesFromQuery,
  fields
) {
  if (!allHashesFromQuery.length) return [];
  // isolate the type of search from the rest of the hash name
  const hyphenIdx = allHashesFromQuery[0].indexOf("~");
  const typeName = allHashesFromQuery[0].slice(0, hyphenIdx);
  const reduction = allHashesFromQuery.reduce(async (acc, hash) => {
    // for each hash from the input query, build the response object
    // first, check the SLRU cache
    let readVal = await this.SLRU.get(hash);
    // if the hash is not in the SLRU, check the WLRU
    if (!readVal) readVal = await this.WLRU.get(hash);
    if (readVal === "DELETED") return acc;
    // record the access in the frequency sketch, keyed by the
    // stringified value (not the hash key)
    if (readVal) this.sketch.increment(JSON.stringify(readVal));
    if (!readVal) return undefined;
    const dataObj = {};
    for (const field in fields) {
      if (readVal[field] === "DELETED") continue;
      // for each field in the fields input query, add the corresponding value from the cache
      // if the field is not another array of hashes
      if (readVal[field] === undefined && field !== "__typename") {
        return undefined;
      }
      if (typeof fields[field] !== "object") {
        // add the typename for the type
        if (field === "__typename") {
          dataObj[field] = typeName;
        } else dataObj[field] = readVal[field]; // assign the value from the cache to the key in the response
      } else {
        // case where the field from the input query is an array of hashes, recursively invoke populateAllHashes
        dataObj[field] = await this.populateAllHashes(
          readVal[field],
          fields[field]
        );
        if (dataObj[field] === undefined) return undefined;
      }
    }
    // acc is an array of response object for each hash
    const resolvedProm = await Promise.resolve(acc);
    resolvedProm.push(dataObj);
    return resolvedProm;
  }, []);
  return reduction;
};

// read from the cache and generate a response object to be populated with
// values from cache. Returns { data } on a full hit, null when a query is
// absent from ROOT_QUERY, undefined on partial misses or mutations.
WTinyLFUCache.prototype.read = async function (queryStr) {
  if (typeof queryStr !== "string") throw TypeError("input should be a string");
  // destructure the query string into an object
  const queries = destructureQueries(queryStr).queries;
  // breaks out of function if queryStr is a mutation
  if (!queries) return undefined;
  const responseObject = {};
  // iterate through each query in the input queries object
  for (const query in queries) {
    // get the entire str query from the name input query and arguments
    const queryHash = queries[query].name.concat(queries[query].arguments);
    const rootQuery = this.ROOT_QUERY;
    // match in ROOT_QUERY
    if (rootQuery[queryHash]) {
      // get the hashes to populate from the existent query in the cache
      const arrayHashes = rootQuery[queryHash];
      // Determines responseObject property labels - use alias if applicable, otherwise use name
      const respObjProp = queries[query].alias ?? queries[query].name;
      // invoke populateAllHashes and add data objects to the response object for each input query
      responseObject[respObjProp] = await this.populateAllHashes(
        arrayHashes,
        queries[query].fields
      );

      if (!responseObject[respObjProp]) return undefined;

      // no match with ROOT_QUERY return null or ...
    } else {
      return null;
    }
  }
  return { data: responseObject };
};

// Normalize a GraphQL response and merge it into whichever sub-cache
// (SLRU or window LRU) already holds each entity; new entities go through
// putAndPromote. Mirrors LRUCache/LFUCache.write.
// NOTE(review): labelId is not defined in the visible portion of this file
// (the LRU/LFU caches each define their own) — confirm it is in scope here.
WTinyLFUCache.prototype.write = async function (queryStr, respObj, searchTerms, deleteFlag) {
  let nullFlag = false;
  let deleteMutation = "";
  // NOTE(review): wasFoundIn is declared once for the whole loop below and
  // never reset per hash, so a stale 'SLRU'/'WLRU' value from an earlier
  // iteration can steer writes for a later hash that missed both caches —
  // verify this is intended.
  let wasFoundIn = null;
  for(const query in respObj.data) {
    if(respObj.data[query] === null) nullFlag = true
    else if(query.toLowerCase().includes('delete')) deleteMutation = labelId(respObj.data[query]);
  }
  if(!nullFlag) {
    const queryObj = destructureQueries(queryStr);
    const resFromNormalize = normalizeResult(queryObj, respObj, deleteFlag);
    // update the original cache with same reference
    for (const hash in resFromNormalize) {
      // first check SLRU
      let resp = await this.SLRU.get(hash);
      // next, check the window LRU
      if (resp) wasFoundIn = 'SLRU'
      if (!resp) resp = await this.WLRU.get(hash);
      if (resp && !wasFoundIn) wasFoundIn = 'WLRU';
      if (resp) this.sketch.increment(JSON.stringify(resp));
      if (hash === "ROOT_QUERY" || hash === "ROOT_MUTATION") {
        if(deleteMutation === "") {
          this[hash] = Object.assign(this[hash], resFromNormalize[hash]);
        } else {
          // delete mutation: strip the deleted hash out of every matching
          // collection-style ROOT_QUERY entry
          const typeName = deleteMutation.slice(0, deleteMutation.indexOf('~'));
          for(const key in this.ROOT_QUERY) {
            if(key.includes(typeName + 's') || key.includes(plural(typeName))) {
              for(let i = 0; i < this.ROOT_QUERY[key].length; i++) {
                if(this.ROOT_QUERY[key][i] === deleteMutation) {
                  this.ROOT_QUERY[key].splice(i, 1);
                  i--;
                }
              }
            }
          }
        }
      } else if (resFromNormalize[hash] === "DELETED") {
        // Should we delete directly or do we still need to flag as DELETED
        if (wasFoundIn === 'SLRU') await this.SLRU.put(hash, "DELETED");
        else if (wasFoundIn === 'WLRU') await this.WLRU.put(hash, "DELETED");
      } else if (resp) {
        const newObj = Object.assign(resp, resFromNormalize[hash]);
        // write to the appropriate cache
        if (wasFoundIn === 'SLRU') await this.SLRU.put(hash, newObj);
        else if (wasFoundIn === 'WLRU') await this.WLRU.put(hash, newObj);
      } else {
        const typeName = hash.slice(0, hash.indexOf('~'));
        await this.putAndPromote(hash, resFromNormalize[hash]);
        for(const key in this.ROOT_QUERY) {
          if(key.includes(typeName + 's') || key.includes(plural(typeName))) {
            this.ROOT_QUERY[key].push(hash);
          }
        }
        /****
         * if search terms were provided in the wrapper and the query is an
         * "all"-type query, build out queries in ROOT_QUERY that match the
         * search terms for each item retrieved from the "all"-type query so
         * that future single queries can be looked up directly from the cache
         ****/
        if (searchTerms && queryStr.slice(8, 11) === 'all'){
          searchTerms.forEach(el => {
            const elVal = resFromNormalize[hash][el].replaceAll(' ', '');
            const hashKey = `one${typeName}(${el}:"${elVal}")`;
            if (!this.ROOT_QUERY[hashKey]) this.ROOT_QUERY[hashKey] = [];
            this.ROOT_QUERY[hashKey].push(hash);
          });
        }
      }
    }
  }
};

// Note: WholeQuery is not a currently-functioning option in Obsidian Wrapper
// NOTE(review): WTinyLFUCache has no `put` method (only putAndPromote), and
// the first argument passes the ROOT_QUERY *value* where a key is expected —
// both consistent with this mode being non-functional; confirm before use.
WTinyLFUCache.prototype.writeWholeQuery = function (queryStr, respObj) {
  const hash = queryStr.replace(/\s/g, "");
  this.put(this.ROOT_QUERY[hash], respObj);
  return respObj;
};
/**
 * Entry in a frequency bucket's doubly-linked list.
 */
class Node {
  constructor(key, value) {
    this.key = key; // cache key, e.g. 'Actor~1'
    this.val = value; // cached record, e.g. { id: 1, name: 'harrison', ... }
    this.next = this.prev = null;
    this.freq = 1; // access count; new entries start at 1
  }
}

/**
 * Doubly-linked list with head/tail sentinels. One list exists per access
 * frequency (see LFUCache.freqHash); the tail end holds the entry least
 * recently used within that frequency.
 */
class DoublyLinkedList {
  constructor() {
    this.head = new Node(null, null);
    this.tail = new Node(null, null);
    this.head.next = this.tail;
    this.tail.prev = this.head;
  }

  // Insert a node right after the head sentinel (most recent in this bucket).
  insertHead(node) {
    node.prev = this.head;
    node.next = this.head.next;
    this.head.next.prev = node;
    this.head.next = node;
  }

  // Unlink a node from this list.
  removeNode(node) {
    const prev = node.prev;
    const next = node.next;
    prev.next = next;
    next.prev = prev;
  }

  // Remove and return the key of the node before the tail sentinel
  // (the LRU entry within this frequency bucket).
  removeTail() {
    const node = this.tail.prev;
    this.removeNode(node);
    return node.key;
  }

  // True when only the two sentinels remain.
  isEmpty() {
    return this.head.next.val == null;
  }
}

/**
 * Browser-side LFU cache for normalized GraphQL responses.
 * nodeHash maps key -> Node; freqHash maps access count -> DoublyLinkedList
 * of nodes at that frequency; leastFreq tracks the current minimum for O(1)
 * eviction.
 * @param {number} capacity - maximum number of entries (0 disables caching)
 */
export default function LFUCache(capacity) {
  this.capacity = capacity;
  this.currentSize = 0;
  this.leastFreq = 0;
  // roots map query/mutation hash strings to arrays of entity hashes
  this.ROOT_QUERY = {};
  this.ROOT_MUTATION = {};
  this.nodeHash = new Map();
  this.freqHash = new Map();
  // NOTE(review): callTime is initialized but never read or written here.
  this.callTime = 0;
}

/**
 * Look up a key, bumping its frequency bucket on a hit.
 * @param {string} key
 * @returns {object|undefined} cached value, or undefined so that Obsidian
 *   will pull new data from graphQL on a miss
 */
LFUCache.prototype.get = function (key) {
  const node = this.nodeHash.get(key);
  if (!node) return undefined;
  // detach from the current frequency bucket
  this.freqHash.get(node.freq).removeNode(node);
  // if this emptied the lowest bucket, the minimum frequency moves up
  if (node.freq === this.leastFreq && this.freqHash.get(node.freq).isEmpty())
    this.leastFreq++;
  node.freq++;
  // freqHash housekeeping: lazily create the next bucket
  if (this.freqHash.get(node.freq) == null)
    this.freqHash.set(node.freq, new DoublyLinkedList());
  this.freqHash.get(node.freq).insertHead(node);
  return node.val;
};

/**
 * Insert or update a key. On overflow, evicts the least-recently-used node
 * from the least-frequently-used bucket.
 * @param {string} key
 * @param {object} value
 * @returns {void}
 */
LFUCache.prototype.put = function (key, value) {
  if (this.capacity === 0) return; // caching disabled
  const node = this.nodeHash.get(key);
  if (!node) {
    // new node
    this.currentSize++;
    if (this.currentSize > this.capacity) {
      // evict from the tail of the least-frequent bucket
      const tailKey = this.freqHash.get(this.leastFreq).removeTail();
      this.nodeHash.delete(tailKey);
      this.currentSize--;
    }
    const newNode = new Node(key, value);
    // freqHash housekeeping: lazily create the frequency-1 bucket
    if (this.freqHash.get(1) == null)
      this.freqHash.set(1, new DoublyLinkedList());
    this.freqHash.get(1).insertHead(newNode);

    this.nodeHash.set(key, newNode);
    this.leastFreq = 1; // a fresh entry is always the minimum frequency
  } else {
    // existing node: update value and promote its frequency, same as get()
    node.val = value;
    this.freqHash.get(node.freq).removeNode(node);
    if (node.freq === this.leastFreq && this.freqHash.get(node.freq).isEmpty())
      this.leastFreq++;
    node.freq++;
    if (this.freqHash.get(node.freq) == null)
      this.freqHash.set(node.freq, new DoublyLinkedList());
    this.freqHash.get(node.freq).insertHead(node);
  }
};

/**
 * Read a query from the cache and build a GraphQL-shaped response object.
 * @param {string} queryStr - raw GraphQL query string
 * @returns {Promise<object|null|undefined>} `{ data }` on a full hit, null
 *   when a query is absent from ROOT_QUERY, undefined on partial misses or
 *   when queryStr is a mutation
 * @throws {TypeError} when queryStr is not a string
 */
LFUCache.prototype.read = async function (queryStr) {
  if (typeof queryStr !== "string") throw new TypeError("input should be a string");
  // destructure the query string into an object
  const queries = destructureQueries(queryStr).queries;
  // breaks out of function if queryStr is a mutation
  if (!queries) return undefined;
  const responseObject = {};
  for (const query in queries) {
    // reconstruct the ROOT_QUERY key from the query name and arguments
    const queryHash = queries[query].name.concat(queries[query].arguments);
    const rootQuery = this.ROOT_QUERY;
    if (rootQuery[queryHash]) {
      const arrayHashes = rootQuery[queryHash];
      // response property label - use alias if applicable, otherwise name
      const respObjProp = queries[query].alias ?? queries[query].name;
      responseObject[respObjProp] = await this.populateAllHashes(
        arrayHashes,
        queries[query].fields
      );

      // a partial miss anywhere invalidates the whole cached read
      if (!responseObject[respObjProp]) return undefined;
    } else {
      // no match with ROOT_QUERY
      return null;
    }
  }
  return { data: responseObject };
};

/**
 * Normalize a GraphQL response and merge it into the cache.
 * @param {string} queryStr - the query/mutation that produced respObj
 * @param {object} respObj - GraphQL response (`{ data: ... }`)
 * @param {string[]} [searchTerms] - fields to pre-build one-item lookups for
 * @param {boolean} [deleteFlag] - forwarded to normalizeResult
 */
LFUCache.prototype.write = async function (queryStr, respObj, searchTerms, deleteFlag) {
  let nullFlag = false;
  let deleteMutation = "";
  // detect null payloads and delete-mutations before touching the cache
  for (const query in respObj.data) {
    if (respObj.data[query] === null) nullFlag = true;
    else if (query.toLowerCase().includes('delete')) deleteMutation = labelId(respObj.data[query]);
  }
  if (!nullFlag) {
    const queryObj = destructureQueries(queryStr);
    const resFromNormalize = normalizeResult(queryObj, respObj, deleteFlag);
    // update the original cache with same reference
    for (const hash in resFromNormalize) {
      const resp = await this.get(hash);
      if (hash === "ROOT_QUERY" || hash === "ROOT_MUTATION") {
        if (deleteMutation === "") {
          // merge new root entries into the existing root object
          this[hash] = Object.assign(this[hash], resFromNormalize[hash]);
        } else {
          // delete mutation: strip the deleted hash out of every matching
          // collection-style ROOT_QUERY entry
          const typeName = deleteMutation.slice(0, deleteMutation.indexOf('~'));
          for (const key in this.ROOT_QUERY) {
            if (key.includes(typeName + 's') || key.includes(plural(typeName))) {
              for (let i = 0; i < this.ROOT_QUERY[key].length; i++) {
                if (this.ROOT_QUERY[key][i] === deleteMutation) {
                  this.ROOT_QUERY[key].splice(i, 1);
                  i--; // stay on the same index after the splice
                }
              }
            }
          }
        }
      } else if (resFromNormalize[hash] === "DELETED") {
        // Should we delete directly or do we still need to flag as DELETED
        await this.put(hash, "DELETED");
      } else if (resp) {
        // entity already cached: merge new fields into the existing record
        const newObj = Object.assign(resp, resFromNormalize[hash]);
        await this.put(hash, newObj);
      } else {
        // brand-new entity: cache it and register it on matching collections
        const typeName = hash.slice(0, hash.indexOf('~'));
        await this.put(hash, resFromNormalize[hash]);
        for (const key in this.ROOT_QUERY) {
          if (key.includes(typeName + 's') || key.includes(plural(typeName))) {
            this.ROOT_QUERY[key].push(hash);
          }
        }
        /****
         * if search terms were provided in the wrapper and the query is an
         * "all"-type query, build out queries in ROOT_QUERY that match the
         * search terms for each item retrieved from the "all"-type query so
         * that future single queries can be looked up directly from the cache
         ****/
        if (searchTerms && queryStr.slice(8, 11) === 'all') {
          searchTerms.forEach(el => {
            const elVal = resFromNormalize[hash][el].replaceAll(' ', '');
            const hashKey = `one${typeName}(${el}:"${elVal}")`;
            if (!this.ROOT_QUERY[hashKey]) this.ROOT_QUERY[hashKey] = [];
            this.ROOT_QUERY[hashKey].push(hash);
          });
        }
      }
    }
  }
};

// Build the "Type~id" cache hash for an entity, tolerating common id spellings.
function labelId(obj) {
  const id = obj.id || obj.ID || obj._id || obj._ID || obj.Id || obj._Id;
  return obj.__typename + "~" + id;
}

/**
 * Remove a single entry from the cache.
 * BUG FIX: the original dereferenced `node.freq` without checking that the
 * hash exists, throwing a TypeError on a miss; now a no-op for unknown keys.
 * @param {string} hash
 */
LFUCache.prototype.cacheDelete = function (hash) {
  const node = this.nodeHash.get(hash);
  if (!node) return;
  this.freqHash.get(node.freq).removeNode(node);
  this.nodeHash.delete(hash);
};

// Reset the cache to its initial empty state.
LFUCache.prototype.cacheClear = function () {
  this.currentSize = 0;
  this.leastFreq = 0;
  this.ROOT_QUERY = {};
  this.ROOT_MUTATION = {};
  this.nodeHash = new Map();
  this.freqHash = new Map();
};

// NOTE(review): passes the ROOT_QUERY *value* where put() expects a key —
// looks inverted; whole-query mode is documented elsewhere in this repo as
// non-functioning, so the behavior is left unchanged.
LFUCache.prototype.writeWholeQuery = function (queryStr, respObj) {
  const hash = queryStr.replace(/\s/g, "");
  this.put(this.ROOT_QUERY[hash], respObj);
  return respObj;
};

// Whole-query read: look up the stored value under the whitespace-stripped
// query string; undefined on a miss.
LFUCache.prototype.readWholeQuery = function (queryStr) {
  const hash = queryStr.replace(/\s/g, "");
  if (this.ROOT_QUERY[hash]) return this.get(this.ROOT_QUERY[hash]);
  return undefined;
};

/**
 * Fill in a response object with cached values for every hash in
 * allHashesFromQuery, recursing into nested hash arrays.
 * @param {string[]} allHashesFromQuery - e.g. ['Actor~1', 'Actor~2']
 * @param {object} fields - destructured field map for this level
 * @returns {Promise<object[]>|[]|undefined} array of data objects, [] for an
 *   empty input, undefined when any required field is missing from cache
 */
LFUCache.prototype.populateAllHashes = function (
  allHashesFromQuery,
  fields
) {
  if (!allHashesFromQuery.length) return [];
  // the type name is the part of the hash before the '~' separator
  const hyphenIdx = allHashesFromQuery[0].indexOf("~");
  const typeName = allHashesFromQuery[0].slice(0, hyphenIdx);
  const reduction = allHashesFromQuery.reduce(async (acc, hash) => {
    // for each hash from the input query, build the response object
    const readVal = await this.get(hash);
    if (readVal === "DELETED") return acc; // skip tombstoned entries
    if (!readVal) return undefined;        // cache miss aborts the read
    const dataObj = {};
    for (const field in fields) {
      if (readVal[field] === "DELETED") continue;
      // a missing scalar (other than __typename) means a partial hit
      if (readVal[field] === undefined && field !== "__typename") {
        return undefined;
      }
      if (typeof fields[field] !== "object") {
        // scalar leaf; __typename is reconstructed from the hash prefix
        if (field === "__typename") {
          dataObj[field] = typeName;
        } else dataObj[field] = readVal[field];
      } else {
        // nested array of hashes: recursively populate
        dataObj[field] = await this.populateAllHashes(
          readVal[field],
          fields[field]
        );
        if (dataObj[field] === undefined) return undefined;
      }
    }
    // acc is a promise resolving to the array of built data objects
    const resolvedProm = await Promise.resolve(acc);
    resolvedProm.push(dataObj);
    return resolvedProm;
  }, []);
  return reduction;
};
function ObsidianWrapper(props) { 11 | // props to be inputted by user when using the Obsdian Wrapper 12 | const { algo, capacity, searchTerms, useCache, persistQueries } = props; 13 | // if useCache hasn't been set, default caching to true 14 | let caching = true; 15 | // if it has been set to false, turn client-side caching off 16 | if (useCache === false) caching = false; 17 | 18 | // algo defaults to LFU, capacity defaults to 2000 19 | const setAlgoCap = (algo, capacity) => { 20 | let cache; 21 | if(caching && algo === 'LRU'){ 22 | cache = new LRUCache(Number(capacity || 2000)) 23 | } else if (caching && algo === 'W-TinyLFU'){ 24 | cache = new WTinyLFUCache(Number(capacity || 2000)) 25 | } else if (caching) { 26 | cache = new LFUCache(Number(capacity || 2000)) 27 | } 28 | return cache; 29 | } 30 | 31 | // once cache is initialized, cannot setCache 32 | // state for cache is initialized based on developer settings in wrapper 33 | // to successfully change between algo types for testing, kill the server, change the algo type in wrapper, then restart server 34 | const [cache, setCache] = React.useState(setAlgoCap(algo, capacity)); 35 | 36 | // FOR DEVTOOL - listening for message from content.js to be able to send algo type and capacity to devtool 37 | window.addEventListener('message', msg => { 38 | if(msg.data.type === 'algocap'){ 39 | window.postMessage({ 40 | algo: algo ? algo : 'LFU', 41 | capacity: capacity ? capacity : 2000 42 | }) 43 | } 44 | }); 45 | 46 | async function query(query, options = {}) { 47 | // FOR DEVTOOL - startTime is used to calculate the performance of the cache 48 | // startDate is to find out when query was made, this data is passed to devtools 49 | const startTime = Date.now(); 50 | const startDate = new Date(Date.now()); 51 | 52 | // set the options object default properties if not provided 53 | const { 54 | endpoint = '/graphql', 55 | cacheRead = !caching ? false : true, 56 | cacheWrite = !caching ? 
false : true, 57 | pollInterval = null, 58 | wholeQuery = false, //Note: logic for true is currently nonfunctional 59 | } = options; 60 | 61 | // when pollInterval is not null the query will be sent to the server every inputted number of milliseconds 62 | if (pollInterval) { 63 | const interval = setInterval(() => { 64 | // pass in query() with options instead 65 | new Promise((resolve, reject) => 66 | resolve( 67 | query(query, { pollInterval: null, cacheRead: false, ...options }) 68 | ) 69 | ); 70 | }, pollInterval); 71 | return interval; 72 | } 73 | 74 | // when cacheRead set to true & we are utilizing client side caching 75 | if (cacheRead && caching) { 76 | let resObj; 77 | // when the developer decides to only utilize whole query for cache 78 | if (wholeQuery) resObj = await cache.readWholeQuery(query); 79 | // attempt to read from the cache 80 | else resObj = await cache.read(query); 81 | // check if query is stored in cache 82 | if (resObj) { 83 | // returning cached response as a promise 84 | const cacheHitResponseTime = Date.now() - startTime; 85 | 86 | // FOR DEVTOOL - sends message to content.js with query metrics when query is a hit 87 | window.postMessage({ 88 | type: 'query', 89 | time: cacheHitResponseTime, 90 | date: startDate.toDateString().slice(0, 24), 91 | query: query, 92 | hit: true 93 | }); 94 | 95 | return new Promise((resolve, reject) => resolve(resObj)); 96 | } 97 | // execute graphql fetch request if cache miss 98 | return new Promise((resolve, reject) => resolve(hunt(query))); 99 | } 100 | // when cacheRead set to false & not using client-side cache 101 | if (!cacheRead || !caching) { 102 | return new Promise((resolve, reject) => resolve(hunt(query))); 103 | } 104 | 105 | // function to be called on cache miss or on intervals or not looking in the cache 106 | async function hunt(query) { 107 | if (!wholeQuery) query = insertTypenames(query); 108 | try { 109 | let resJSON; 110 | // IF WE ARE USING PERSIST QUERIES 111 | if 
(persistQueries) { 112 | // SEND THE HASH 113 | const hash = sha256(query, 'utf8', 'hex'); 114 | resJSON = await fetch(endpoint, { 115 | method: 'POST', 116 | headers: { 117 | 'Content-Type': 'application/json', 118 | Accept: 'application/json', 119 | }, 120 | body: JSON.stringify({ hash }), 121 | }); 122 | 123 | // IF HASH WAS NOT FOUND IN HASH TABLE 124 | if (resJSON.status === 204) { 125 | // SEND NEW REQUEST WITH HASH AND QUERY 126 | resJSON = await fetch(endpoint, { 127 | method: 'POST', 128 | headers: { 129 | 'Content-Type': 'application/json', 130 | Accept: 'application/json', 131 | }, 132 | body: JSON.stringify({ hash, query }), 133 | }); 134 | 135 | } 136 | 137 | // IF WE ARE NOT USING PERSIST QUERIES 138 | } else { 139 | // JUST SEND THE QUERY ONLY 140 | resJSON = await fetch(endpoint, { 141 | method: 'POST', 142 | headers: { 143 | 'Content-Type': 'application/json', 144 | Accept: 'application/json', 145 | }, 146 | body: JSON.stringify({ query }), 147 | }); 148 | } 149 | 150 | const resObj = await resJSON.json(); 151 | const deepResObj = { ...resObj }; 152 | // update result in cache if cacheWrite is set to true 153 | if (cacheWrite && caching && resObj.data[Object.keys(resObj.data)[0]] !== null) { 154 | if (wholeQuery) cache.writeWholeQuery(query, deepResObj); 155 | else if(resObj.data[Object.keys(resObj.data)[0]].length > cache.capacity) console.log('Please increase cache capacity'); 156 | else cache.write(query, deepResObj, searchTerms); 157 | } 158 | const cacheMissResponseTime = Date.now() - startTime; 159 | 160 | // FOR DEVTOOL - sends message to content.js when query is a miss 161 | window.postMessage({ 162 | type: 'query', 163 | time: cacheMissResponseTime, 164 | date: startDate.toDateString().slice(0, 24), 165 | query: query, 166 | hit: false 167 | }); 168 | 169 | return resObj; 170 | } catch (e) { 171 | console.log(e); 172 | } 173 | } 174 | } 175 | 176 | // Function to clear cache and session storage 177 | function clearCache() { 178 | 
cache.cacheClear(); 179 | } 180 | 181 | // NOTE - FOR DEVTOOL - no messages are currently being passed for mutations 182 | // so some logic in content.js and background.js may be missing to handle mutations 183 | 184 | // breaking out writethrough logic vs. non-writethrough logic 185 | async function mutate(mutation, options = {}) { 186 | const startTime = Date.now(); 187 | mutation = insertTypenames(mutation); 188 | const { 189 | endpoint = '/graphql', 190 | cacheWrite = !caching ? false : true, 191 | toDelete = false, 192 | update = null, 193 | writeThrough = true, // unsure if boolean is symantically backwards or not 194 | } = options; 195 | try { 196 | if (!writeThrough) { 197 | // if it's a deletion, then delete from cache and return the object 198 | if (toDelete) { 199 | const responseObj = await cache.writeThrough( 200 | mutation, 201 | {}, 202 | true, 203 | endpoint 204 | ); 205 | const deleteMutationResponseTime = Date.now() - startTime; 206 | return responseObj; 207 | } else { 208 | // for add mutation 209 | const responseObj = await cache.writeThrough( 210 | mutation, 211 | {}, 212 | false, 213 | endpoint 214 | ); 215 | // for update mutation 216 | if (update) { 217 | // run the update function 218 | update(cache, responseObj); 219 | } 220 | // always write/over-write to cache (add/update) 221 | // GQL call to make changes and synchronize database 222 | console.log('WriteThrough - false ', responseObj); 223 | const addOrUpdateMutationResponseTime = Date.now() - startTime; 224 | return responseObj; 225 | } 226 | } else { 227 | const responseObj = await fetch(endpoint, { 228 | method: 'POST', 229 | headers: { 230 | 'Content-Type': 'application/json', 231 | Accept: 'application/json', 232 | }, 233 | body: JSON.stringify({ query: mutation }), 234 | }).then((resp) => resp.json()); 235 | if (!cacheWrite || !caching) return responseObj; 236 | // first behaviour when delete cache is set to true 237 | if (toDelete) { 238 | cache.write(mutation, responseObj, 
searchTerms, true); 239 | return responseObj; 240 | } 241 | // second behaviour if update function provided 242 | if (update) { 243 | update(cache, responseObj); 244 | } 245 | 246 | if(!responseObj.errors) cache.write(mutation, responseObj, searchTerms); 247 | // third behaviour just for normal update (no-delete, no update function) 248 | console.log('WriteThrough - true ', responseObj); 249 | return responseObj; 250 | } 251 | } catch (e) { 252 | console.log(e); 253 | } 254 | } 255 | // Returning Provider React component that allows consuming components to subscribe to context changes 256 | return ( 257 | 261 | ); 262 | } 263 | // Declaration of custom hook to allow access to provider 264 | function useObsidian() { 265 | // React useContext hook to access the global provider by any of the consumed components 266 | return React.useContext(cacheContext); 267 | } 268 | 269 | // Exporting of Custom wrapper and hook to access wrapper cache 270 | export { ObsidianWrapper, useObsidian }; 271 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Obsidian](./assets/bannerfull_gradient.png) 2 | 3 |
GraphQL, built for Deno.
4 | 5 |
6 | 7 |

8 | Obsidian 9 | Tweet 10 |

11 | 12 |
13 | 14 |

15 | GitHub 16 | GitHub issues 17 | GitHub last commit 18 | GitHub Repo stars 19 |

20 | 21 | ## Features 22 | 23 | - (New!) Support for W-TinyLFU client-side cache that brings great hit-ratio performance with minimal memory overhead 24 | - (New!) Option to provide Obsidian with the search types your application uses, allowing data cached from complete dataset pulls to be accessible later on in searches for individual items 25 | - (New!) Refactored server-side caching with Redis 26 | - (New!) Rebuilt developer tool for Obsidian 8.0 for testing and analytics related to the new client caching options 27 | - (New!) Option for persistent queries, allowing only a smaller hash to be sent to the server on client-side cache misses, minimizing the cost of queries. Note that while this will increase the overall performance for frequent, repeat queries. 28 | - Flexible cache responds only with data requested from selected fields 29 | - GraphQL query abstraction and caching improving the performance of your app 30 | - SSR React wrapper, allowing you to cache in browser 31 | - Configurable caching options, giving you complete control over your cache 32 | - Fullstack integration, leveraging client-side and server-side caching to streamline your caching strategy 33 | - Support for the full GraphQL convention 34 | - Support for client-side and server-side cache invalidation 35 | - Optional GraphQL DoS attack mitigation security module 36 | 37 | ## Overview 38 | 39 | Obsidian is Deno's first native GraphQL caching client and server module. Boasting lightning-fast caching and fetching capabilities alongside headlining normalization and rebuilding strategies, Obsidian is equipped to support scalable, highly performant applications. 40 | 41 | With additional support for use in server-side rendered React apps built with Deno, full stack integration of Obsidian enables a fast and flexible caching solution. 42 | 43 | ## Installation 44 | 45 |
QUICK START
46 |
47 | 48 | ## Creating the Router 49 | 50 | ```javascript 51 | import { Application, Router } from 'https://deno.land/x/oak@v6.0.1/mod.ts'; 52 | import { ObsidianRouter, gql } from 'https://deno.land/x/obsidian/mod.ts'; 53 | import { resolvers } from './ import from your resolvers file'; 54 | import { types } from './ import your schema/types from schema/types file'; 55 | 56 | interface ObsRouter extends Router { 57 | obsidianSchema?: any; 58 | } 59 | 60 | const GraphQLRouter = 61 | (await ObsidianRouter) < 62 | ObsRouter > 63 | { 64 | Router, // your router in deno 65 | typeDefs: types, // graphQL typeDefs 66 | resolvers: resolvers, // graphQL resolvers 67 | }; 68 | 69 | // attach the graphql router's routes to your deno app 70 | app.use(GraphQLRouter.routes(), GraphQLRouter.allowedMethods()); 71 | ``` 72 | ## Selecting options for the Router 73 | ```javascript 74 | const GraphQLRouter = 75 | (await ObsidianRouter) < 76 | ObsRouter > 77 | { 78 | Router, // Router that is initialized by server. 
79 | path: '/graphql', // endpoint for graphQL queries, default to '/graphql' 80 | typeDefs: types, // graphQL typeDefs 81 | resolvers: resolvers, // graphQL resolvers 82 | usePlayground: true, // Boolean to allow for graphQL playground, default to false 83 | useCache: true, // Boolean to toggle all cache functionality, default to true 84 | redisPort: 6379, // Desired redis port, default to 6379 85 | policy: 'allkeys-lru', // Option select your Redis policy, default to allkeys-lru 86 | maxmemory: '2000mb', // Option to select Redis capacity, default to 2000mb 87 | searchTerms: [] //Optional array to allow broad queries to store according to search fields so individual searches are found in cache 88 | persistQueries: true, //Boolean to toggle the use of persistent queries, default to false - NOTE: if using, must also be enabled in client wrapper 89 | hashTableSize: 16, // Size of hash table for persistent queries, default to 16 90 | maxQueryDepth: 0, // Maximum depth of query, default to 0 91 | customIdentifier: ['__typename', '_id'], // keys to be used to idedntify and normalize object 92 | mutationTableMap: {}, //Object where keys are add mutation types and value is an array of affected tables (e.g. 
{addPlants: ['plants'], addMovie: ['movies']}) 93 | }; 94 | ``` 95 | 96 | ## Creating the Wrapper 97 | 98 | ```javascript 99 | import { ObsidianWrapper } from 'https://deno.land/x/obsidian/clientMod.ts'; 100 | 101 | const App = () => { 102 | return ( 103 | 104 | 105 | 106 | ); 107 | }; 108 | ``` 109 | 110 | ## Selecting options for the Wrapper 111 | 112 | ```javascript 113 | 120 | 121 | 122 | ``` 123 | 124 | ## Making a Query 125 | 126 | ```javascript 127 | import { useObsidian } from 'https://deno.land/x/obsidian/clientMod.ts'; 128 | 129 | const MovieApp = () => { 130 | const { query } = useObsidian(); 131 | const [movies, setMovies] = (React as any).useState(''); 132 | 133 | const queryStr = `query { 134 | movies { 135 | id 136 | title 137 | releaseYear 138 | genre 139 | } 140 | } 141 | `; 142 | 143 | return ( 144 |

{movies}

145 | 151 | ); 152 | }; 153 | ``` 154 | 155 | ## Making a Mutation 156 | 157 | ```javascript 158 | import { useObsidian } from 'https://deno.land/x/obsidian/clientMod.ts'; 159 | 160 | const MovieApp = () => { 161 | const { mutate } = useObsidian(); 162 | const [movies, setMovies] = (React as any).useState(''); 163 | 164 | const queryStr = `mutation { 165 | addMovie(input: {title: "Cruel Intentions", releaseYear: 1999, genre: "DRAMA" }) { 166 | id 167 | title 168 | releaseYear 169 | genre 170 | } 171 | } 172 | `; 173 | 174 | return ( 175 |

{movies}

176 | 182 | ); 183 | } 184 | ``` 185 | ## Setting up Redis 186 | 187 | In order to utilize server side caching, a Redis instance must be available and running. Redis installation and quick-start documentation can be found [here](https://redis.io/docs/getting-started/). Make sure to keep a redis instance running whenever the application is utilizing server side caching to avoid running into issues. 188 | 189 | To connect Obsidian to Redis, create a .env file in the root directory of the application with the following information: 190 | 191 | ```javascript 192 | REDIS_HOST= //string of redis host name, typically defaulted to '127.0.0.1' by Redis 193 | ``` 194 | Be sure to also specify the Redis TCP port by passing in the port number as an argument into Obsidian Router (see Selecting options for the Router above). 195 | 196 | 197 | ## Documentation 198 | 199 | [getobsidian.io](http://getobsidian.io/) 200 | 201 | ## Developer Tool 202 | 203 | Information and instructions on how to use our developer tool can be found here
204 | (works with Obsidian 8.0):
205 | [open-source-labs/obsidian-developer-tool](https://github.com/open-source-labs/obsidian-developer-tool) 206 | 207 | ## Obsidian 8.0 Demo 208 | 209 | GitHub repo for a demo with some example code to play with:
210 | [oslabs-beta/obsidian-demo-8.0](https://github.com/oslabs-beta/obsidian-8.0-demo) 211 | 212 | ## Features In Progress 213 | 214 | - Server-side caching improvements 215 | - More comprehensive mutation support 216 | - searchTerms option optimization 217 | - Ability to store/read only the whole query 218 | - Hill Climber optimization for W-TinyLFU cache size allocation 219 | - Developer Tool server-side cache integration 220 | - Developer Tool View Cache component, and Playground component 221 | 222 | ## Authors 223 | [David Kim](https://github.com/davidtoyoukim) 224 | [David Norman](https://github.com/DavidMNorman) 225 | [Eileen Cho](https://github.com/exlxxn) 226 | [Joan Manto](https://github.com/JoanManto) 227 | [Alex Lopez](https://github.com/AlexLopez7) 228 | [Kevin Huang](https://github.com/kevin-06-huang) 229 | [Matthew Weisker](https://github.com/mweisker) 230 | [Ryan Ranjbaran](https://github.com/ranjrover) 231 | [Derek Okuno](https://github.com/okunod) 232 | [Liam Johnson](https://github.com/liamdimitri) 233 | [Josh Reed](https://github.com/joshreed104) 234 | [Jonathan Fangon](https://github.com/jonathanfangon) 235 | [Liam Jeon](https://github.com/laj52) 236 | [Yurii Shchyrba](https://github.com/YuriiShchyrba) 237 | [Linda Zhao](https://github.com/lzhao15) 238 | [Ali Fay](https://github.com/ali-fay) 239 | [Anthony Guan](https://github.com/guananthony) 240 | [Yasir Choudhury](https://github.com/Yasir-Choudhury) 241 | [Yogi Paturu](https://github.com/YogiPaturu) 242 | [Michael Chin](https://github.com/mikechin37) 243 | [Dana Flury](https://github.com/dmflury) 244 | [Sardor Akhmedov](https://github.com/sarkamedo) 245 | [Christopher Berry](https://github.com/cjamesb) 246 | [Olivia Yeghiazarian](https://github.com/Olivia-code) 247 | [Michael Melville](https://github.com/meekle) 248 | [John Wong](https://github.com/johnwongfc) 249 | [Kyung Lee](https://github.com/kyunglee1) 250 | [Justin McKay](https://github.com/justinwmckay) 251 | [Patrick 
Sullivan](https://github.com/pjmsullivan) 252 | [Cameron Simmons](https://github.com/cssim22) 253 | [Raymond Ahn](https://github.com/raymondcodes) 254 | [Alonso Garza](https://github.com/Alonsog66) 255 | [Burak Caliskan](https://github.com/CaliskanBurak) 256 | [Matt Meigs](https://github.com/mmeigs) 257 | [Travis Frank](https://github.com/TravisFrankMTG/) 258 | [Lourent Flores](https://github.com/lourentflores) 259 | [Esma Sahraoui](https://github.com/EsmaShr) 260 | [Derek Miller](https://github.com/dsymiller) 261 | [Eric Marcatoma](https://github.com/ericmarc159) 262 | [Spencer Stockton](https://github.com/tonstock) 263 | --------------------------------------------------------------------------------