├── .gitignore ├── .npmignore ├── LICENSE ├── LRFUExpirer.js ├── README.md ├── index.d.ts ├── index.js ├── package.json ├── rollup.config.js └── tests ├── benchmark.js └── test.js /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | dist 8 | 9 | # Runtime data 10 | pids 11 | *.pid 12 | *.seed 13 | *.pid.lock 14 | 15 | # Directory for instrumented libs generated by jscoverage/JSCover 16 | lib-cov 17 | 18 | # Coverage directory used by tools like istanbul 19 | coverage 20 | 21 | # nyc test coverage 22 | .nyc_output 23 | 24 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 25 | .grunt 26 | 27 | # Bower dependency directory (https://bower.io/) 28 | bower_components 29 | 30 | # node-waf configuration 31 | .lock-wscript 32 | 33 | # Compiled binary addons (http://nodejs.org/api/addons.html) 34 | build/Release 35 | 36 | # Dependency directories 37 | node_modules/ 38 | jspm_packages/ 39 | 40 | package-lock.json 41 | # Typescript v1 declaration files 42 | typings/ 43 | 44 | # Optional npm cache directory 45 | .npm 46 | 47 | # Optional eslint cache 48 | .eslintcache 49 | 50 | # Optional REPL history 51 | .node_repl_history 52 | 53 | # Output of 'npm pack' 54 | *.tgz 55 | 56 | # Yarn Integrity file 57 | .yarn-integrity 58 | 59 | # dotenv environment variables file 60 | .env 61 | tests/samples 62 | 63 | # Visual Studio Code directory 64 | .vscode 65 | .vs 66 | 67 | build -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # Dependency directories 2 | node_modules/ 3 | tests/samples 4 | .vs 5 | build/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Kris Zyp 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /LRFUExpirer.js: -------------------------------------------------------------------------------- 1 | const PINNED_IN_MEMORY = 0x7fffffff 2 | const NOT_IN_LRU = 0x40000000 3 | export const EXPIRED_ENTRY = { 4 | description: 'This cache entry value has been expired from the LRFU cache, and is waiting for garbage collection to be removed.' 5 | } 6 | /* bit pattern: 7 | * < is-in-lru 1 bit > ...< mask/or bits 6 bits > < position in cache - 22 bits > 8 | */ 9 | export class LRFUExpirer { 10 | constructor(options) { 11 | this.lruSize = options && options.lruSize || 0x2000 12 | if (this.lruSize > 0x400000) 13 | throw new Error('The LRU/cache size was larger than the maximum cache size of 16777216 (LRU size of 4194304)') 14 | this.reset() 15 | startTimedCleanup(new WeakRef(this), options && options.cleanupInterval || 60000) 16 | } 17 | delete(entry) { 18 | if (entry.position < NOT_IN_LRU) { 19 | this.lru[(entry.position >> 22) & 3][entry.position & 0x3fffff] = null 20 | } 21 | entry.position |= NOT_IN_LRU 22 | } 23 | used(entry, expirationPriority) { 24 | let originalPosition = entry.position 25 | let orMask 26 | if (expirationPriority < 0) { 27 | // pin this in memory, first remove from LRFU and then mark it as pinned in memory 28 | if (entry.position < NOT_IN_LRU) { 29 | this.lru[(entry.position >> 22) & 3][entry.position & 0x3fffff] = null 30 | } 31 | entry.position = PINNED_IN_MEMORY 32 | return 33 | } else if (entry.position == PINNED_IN_MEMORY && expirationPriority == undefined) { 34 | return 35 | } else if (expirationPriority >= 0) { 36 | let bits = 0 37 | if (expirationPriority > (this.lruSize >> 2)) 38 | expirationPriority = this.lruSize >> 2 39 | while (expirationPriority > 0) { 40 | expirationPriority = expirationPriority >> 1 41 | bits++ 42 | } 43 | expirationPriority = bits 44 | } else { 45 | if (originalPosition >= 0) 46 | expirationPriority = (originalPosition >> 24) & 0x3f 47 | else 48 | expirationPriority = 0 49 | } 50 | 51 | let lruPosition 52 | let lruIndex 53 | if (originalPosition < NOT_IN_LRU) { 54 | lruIndex = (originalPosition >> 22) & 3 55 | if (lruIndex >= 3) 56 | return // can't get any higher than this, don't do anything 57 | let lru = this.lru[lruIndex] 58 | // check to see if it is in the same generation 59 | lruPosition = lru.position 60 | if ((originalPosition > lruPosition ? 
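// (a reading of this check) the ternary below makes the distance from the entry's recorded
// position to this generation's insertion cursor wrap-around aware; if that distance is less
// than a quarter of the LRU size, the entry was only recently added here and is not promoted yet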
lruPosition + this.lruSize : lruPosition) - originalPosition < (this.lruSize >> 2)) 61 | return // only recently added, don't promote 62 | lru[originalPosition & 0x3fffff] = null // remove it, we are going to move/promote it 63 | lruIndex++ 64 | } else 65 | lruIndex = 0 66 | this.insertEntry(entry, lruIndex, expirationPriority) 67 | } 68 | insertEntry(entry, lruIndex, expirationPriority) { 69 | let lruPosition, nextLru = this.lru[lruIndex] 70 | let orMask = 0x3fffff >> (22 - expirationPriority) 71 | do { 72 | // put it in the next lru 73 | lruPosition = nextLru.position | orMask 74 | let previousEntry = nextLru[lruPosition & 0x3fffff] 75 | nextLru[lruPosition & 0x3fffff] = entry 76 | if (entry) 77 | entry.position = lruPosition | (expirationPriority << 24) 78 | nextLru.position = ++lruPosition 79 | if ((lruPosition & 0x3fffff) >= this.lruSize) { 80 | // reset at the beginning of the lru cache 81 | lruPosition &= 0x7fc00000 82 | nextLru.position = lruPosition 83 | nextLru.cycles++ 84 | } 85 | entry = previousEntry 86 | if (entry && (nextLru = this.lru[--lruIndex])) { 87 | expirationPriority = ((entry.position || 0) >> 24) & 0x3f 88 | orMask = 0x3fffff >> (22 - expirationPriority) 89 | } else 90 | break 91 | } while (true) 92 | if (entry) {// this one was removed 93 | entry.position |= NOT_IN_LRU 94 | if (entry.cache) 95 | entry.cache.onRemove(entry) 96 | else if (entry.deref) // if we have already registered the entry in the finalization registry, just clear it 97 | entry.value = EXPIRED_ENTRY 98 | } 99 | } 100 | reset() { 101 | /* if (this.lru) { 102 | for (let i = 0; i < 4; i++) { 103 | for (let j = 0, l = this.lru.length; j < l; j++) { 104 | let entry = this.lru[i][j] 105 | if (entry) {// this one was removed 106 | entry.position |= NOT_IN_LRU 107 | if (entry.cache) 108 | entry.cache.onRemove(entry) 109 | else if (entry.deref) // if we have already registered the entry in the finalization registry, just clear it 110 | entry.value = EXPIRED_ENTRY 111 | } 112 | } 113 | } 114 | }*/ 115 | this.lru = [] 116 | for (let i = 0; i < 4; i++) { 117 | this.lru[i] = new Array(this.lruSize) 118 | this.lru[i].position = i << 22 119 | this.lru[i].cycles = 0 120 | } 121 | } 122 | cleanup() { // clean out a portion of the cache, so we can clean up over time if idle 123 | let toClear = this.lruSize >> 4 // 1/16 of the lru cache at a time 124 | for (let i = 3; i >= 0; i--) { 125 | let lru = this.lru[i] 126 | for (let j = 0, l = toClear; j < l; j++) { 127 | if (lru[lru.position & 0x3fffff]) { 128 | toClear-- 129 | this.insertEntry(null, i, 0) 130 | } else { 131 | if ((++lru.position & 0x3fffff) >= this.lruSize) { 132 | // reset at the beginning of the lru cache 133 | lru.position &= 0x7fc00000 134 | lru.cycles++ 135 | } 136 | } 137 | } 138 | } 139 | } 140 | } 141 | function startTimedCleanup(reference, cleanupInterval) { 142 | let interval = setInterval(() => { 143 | let expirer = reference.deref() 144 | if (expirer) 145 | expirer.cleanup() 146 | else 147 | clearInterval(interval) 148 | }, cleanupInterval) 149 | if (interval.unref) 150 | interval.unref() 151 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![npm version](https://img.shields.io/npm/dw/weak-lru-cache)](https://www.npmjs.org/package/weak-lru-cache) 2 | [![npm version](https://img.shields.io/npm/v/weak-lru-cache.svg?style=flat-square)](https://www.npmjs.org/package/weak-lru-cache) 3 | 
[![license](https://img.shields.io/badge/license-MIT-brightgreen)](LICENSE)
4 |
5 | # weak-lru-cache
6 |
7 | The weak-lru-cache package provides a powerful cache that works in harmony with JS garbage collection (GC) and uses a combined least-recently used (LRU) and least-frequently used (LFU) expiration strategy to cache data with highly optimized retention. It uses LRU/LFU (LRFU) expiration to retain referenced data, and then, once data has been inactive, it uses weak references (and the finalization registry) to allow GC to remove the cached data as part of the normal GC cycles, while still providing cached access to the data as long as it still resides in memory and hasn't been collected. This provides the best of modern expiration strategies combined with optimal GC interaction.
8 |
9 | In a typical GC'ed VM, objects may continue to exist in memory long after they are no longer (strongly) referenced. By using a weak-referencing cache, we allow the GC to collect such data, while the cache can keep returning it up until the point it is actually garbage collected, ensuring much more efficient use of memory.
10 |
11 | This can also be used to ensure a single object identity per key. By storing an object in the cache for a given key, we can check the cache whenever we need that object before recreating it, ensuring that the same object is used for a given key as long as it still exists in memory.
12 |
13 | This project is tested on and runs on Node.js and Deno (it requires Node.js v14.10 or higher, or Node.js v13.0+ with the --harmony-weak-refs flag).
14 |
15 | ## Setup
16 |
17 | Install with npm:
18 |
19 | ```
20 | npm i weak-lru-cache
21 | ```
22 | And `import` or `require` it to access the constructor:
23 | ```
24 | import { WeakLRUCache } from 'weak-lru-cache';
25 |
26 | let myCache = new WeakLRUCache();
27 | myCache.setValue('key', { greeting: 'hello world' });
28 | myCache.getValue('key'); // returns the object above as long as it is still cached
29 | ```
30 | Or in Deno, import directly from the [`weakcache` deno.land package](https://deno.land/x/weakcache):
31 | ```
32 | import { WeakLRUCache } from 'https://deno.land/x/weakcache/index.js';
33 | ...
34 | ```
35 |
36 | ## Basic Usage
37 |
38 | The `WeakLRUCache` class extends the native `Map` class and also includes the following methods, which are the primary intended interactions with the cache:
39 |
40 | ### getValue(key)
41 | Gets the value referenced by the given key and returns it. If the value is no longer cached, it will return undefined.
42 |
43 | ### setValue(key, value, expirationPriority?)
44 | Sets or inserts the value into the cache with the given key. This will create a new cache entry to reference your provided value, and that cache entry is returned.
45 |
46 | The `key` can be any JS value.
47 |
48 | If the `value` is an object, it will be stored in the cache until it expires (as determined by the LRFU expiration policy) *and* is garbage collected. If you provide a primitive value (string, number, boolean, null, etc.), it cannot be weakly referenced, so the value will still be stored in the LRFU cache, but once it expires, it will be removed immediately rather than waiting for GC.
49 |
50 | The `expirationPriority` is an optional directive indicating how quickly the cache entry should expire: a higher value indicates the entry should expire sooner. This can be used to help limit the amount of memory used by the cache if you are loading large objects. Using the default cache size, and entries with varied expirationPriority values, the expiration cache will typically hold a sum of about 100,000 of the expirationPriority values. This means that if you wanted a cache to stay at or under roughly 100MB, you could provide the object's size divided by 1000 (or, much faster, shifted with `>> 10`) as the expirationPriority:
51 | ```
52 | myCache.setValue('key', bigObject, sizeOfObject >> 10);
53 | ```
54 | The `expirationPriority` can also be set to `-1` to indicate that the value should be pinned in memory and never expire, until the entry is set again (with a non-negative `expirationPriority`).
55 |
56 | ## `WeakLRUCache(options)` Constructor
57 |
58 | The `WeakLRUCache` constructor accepts an optional `options` object parameter for specific configuration and cache tuning. The following properties (all optional) can be defined on the `options` object:
59 |
60 | ### cacheSize
61 | This indicates the number of entries to allocate in the cache. This defaults to 32,768, and the maximum allowed value is 16,777,216.
62 |
63 | ### expirer
64 | By default, there is a single shared expiration cache, which is a single instance of `LRFUExpirer`. However, you can define your own expiration cache or create separate instances of `LRFUExpirer`. Generally, using the (default) single instance is preferable, since it naturally gives higher priority/recency to more heavily used caches and allows lesser used caches to expire more of their entries.
65 |
66 | You can also set this to `false` to indicate that no LRU/LRFU cache should be used and that the cache should rely entirely on weak references.
67 |
68 | ### deferRegister
69 | This flag can be set to `true` to save the key and cache on each entry so that the finalization registration can be deferred. This tends to be slightly faster for caches that are heavily dominated by entry replacements (multiple setValue calls to the same entries before they expire). However, for the generally more typical usage that is dominated by setting values and getting them until they expire, the default setting will probably be faster.
70 |
71 |
72 | ## Using Cache Entries
73 |
74 | The `WeakLRUCache` class extends the native `Map` class, and consequently, all the standard Map methods are available. As a Map, all the values are cache entries, which are typically `WeakRef` objects that hold a reference to the cached value, along with retention information. This means that `get(key)` differs from `getValue(key)` in that it returns the cache entry, which references the value, rather than the value itself.
75 |
76 | Cache entries can also contain metadata about the entry, including information about the entry's position in the LRFU cache, which determines when it will expire. However, you can also set your own properties on the cache entry to store your own metadata. This will be retained until the entry is removed/collected.
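For example (the `loadedAt` property below is just arbitrary user metadata, not part of the API):
```
import { WeakLRUCache } from 'weak-lru-cache';

let cache = new WeakLRUCache();
let entry = cache.setValue('config', { mode: 'dark' }); // setValue returns the cache entry

cache.getValue('config'); // -> { mode: 'dark' } (the value), while it is still cached
cache.get('config'); // -> the cache entry (typically a WeakRef) that references the value

entry.loadedAt = Date.now(); // your own metadata, retained until the entry is removed/collected
if (entry.deref)
	entry.deref(); // the underlying object, or undefined once it has been collected
```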
77 |
78 | ## License
79 |
80 | MIT
81 |
--------------------------------------------------------------------------------
/index.d.ts:
--------------------------------------------------------------------------------
1 | interface LRFUExpirerOptions {
2 | 	lruSize?: number;
3 | 	cleanupInterval?: number;
4 | }
5 |
6 | export class LRFUExpirer {
7 | 	constructor(options?: LRFUExpirerOptions);
8 | }
9 |
10 | interface WeakLRUCacheOptions {
11 | 	cacheSize?: number;
12 | 	expirer?: LRFUExpirer | false;
13 | 	deferRegister?: boolean;
14 | }
15 |
16 | export class CacheEntry<V> {
17 | 	value?: V
18 | 	deref?(): V
19 | }
20 |
21 | export class WeakLRUCache<K, V> extends Map<K, CacheEntry<V>> {
22 | 	constructor(options?: WeakLRUCacheOptions);
23 |
24 | 	/**
25 | 	 * Get a value from the cache, if it is still in memory. If the value is no longer cached, will return undefined.
26 | 	 * @param key The key to use to retrieve the value
27 | 	 */
28 | 	getValue(key: K): V | undefined;
29 | 	/**
30 | 	 * Put a key-value into the cache
31 | 	 * @param key The key to use to insert the entry
32 | 	 * @param value The value to insert into the cache
33 | 	 * @param expirationPriority A priority for expiration, a higher value will expire sooner
34 | 	 */
35 | 	setValue(key: K, value: V, expirationPriority?: number): void;
36 | }
37 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | import { LRFUExpirer, EXPIRED_ENTRY } from './LRFUExpirer.js'
2 | export { LRFUExpirer } from './LRFUExpirer.js'
3 |
4 | let defaultExpirer
5 | export class WeakLRUCache extends Map {
6 | 	constructor(options) {
7 | 		super()
8 | 		this.hits = 0
9 | 		this.misses = 0
10 | 		if (options && options.cacheSize) {
11 | 			options.lruSize = options.cacheSize >> 2
12 | 		}
13 | 		if (options && options.clearKeptInterval) {
14 | 			this.clearKeptInterval = options.clearKeptInterval
15 | 			this.clearKeptCount = 0
16 | 			this.clearKeptObjects = options.clearKeptObjects
17 | 		}
18 | 		this.expirer = (options ? options.expirer === false ? defaultNoLRUExpirer : options.expirer : null) || defaultExpirer || (defaultExpirer = new LRFUExpirer(options))
19 | 		this.deferRegister = Boolean(options && options.deferRegister)
20 | 		let registry = this.registry = new FinalizationRegistry(key => {
21 | 			let entry = super.get(key)
22 | 			if (entry && entry.deref && entry.deref() === undefined)
23 | 				super.delete(key)
24 | 		})
25 | 	}
26 | 	onRemove(entry) {
27 | 		let target = entry.deref && entry.deref()
28 | 		if (target) {
29 | 			// remove strong reference, so only a weak reference, wait until it is finalized to remove
30 | 			this.registry.register(target, entry.key)
31 | 			entry.value = undefined
32 | 		} else if (entry.key) {
33 | 			let currentEntry = super.get(entry.key)
34 | 			if (currentEntry === entry)
35 | 				super.delete(entry.key)
36 | 		}
37 | 	}
38 | 	get(key, mode) {
39 | 		let entry = super.get(key)
40 | 		let value
41 | 		if (entry) {
42 | 			this.hits++
43 | 			value = entry.value
44 | 			if (value === EXPIRED_ENTRY) {
45 | 				value = entry.deref && entry.deref()
46 | 				if (value === undefined)
47 | 					super.delete(key)
48 | 				else {
49 | 					entry.value = value
50 | 					if (this.clearKeptInterval)
51 | 						this.incrementClearKeptCount()
52 | 					if (mode !== 1)
53 | 						this.expirer.used(entry)
54 | 					return mode === 2 ? value : entry
55 | 				}
56 | 			}
57 | 			else {
58 | 				if (mode !== 1)
59 | 					this.expirer.used(entry)
60 | 				return mode === 2 ?
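// (describing this implementation) mode === 2 returns the raw value (the getValue() path),
// while any other mode returns the CacheEntry itself; mode === 1 additionally skips the
// expirer.used() call above, so the entry is read without being promoted in the LRFU cache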
value : entry 61 | } 62 | } else 63 | this.misses++ 64 | } 65 | getValue(key) { 66 | return this.get(key, 2) 67 | } 68 | 69 | setValue(key, value, expirationPriority) { 70 | let entry 71 | if (value && typeof value == 'object') { 72 | entry = new WeakRef(value) 73 | if (this.clearKeptInterval) 74 | this.incrementClearKeptCount() 75 | entry.value = value 76 | if (this.deferRegister) { 77 | entry.key = key 78 | entry.cache = this 79 | } else 80 | this.registry.register(value, key) 81 | } else if (value !== undefined) 82 | entry = { value, key, cache: this } 83 | // else entry is undefined 84 | this.set(key, entry, expirationPriority) 85 | return entry 86 | } 87 | incrementClearKeptCount() { 88 | if (++this.clearKeptCount >= this.clearKeptInterval) { 89 | this.clearKeptCount = 0 90 | if (this.clearKeptObjects) 91 | this.clearKeptObjects() 92 | if (this.registry.cleanupSome) 93 | this.registry.cleanupSome() 94 | } 95 | } 96 | set(key, entry, expirationPriority) { 97 | let oldEntry = super.get(key) 98 | if (oldEntry) 99 | this.expirer.delete(oldEntry) 100 | return this.insert(key, entry, expirationPriority) 101 | } 102 | insert(key, entry, expirationPriority) { 103 | if (entry) { 104 | this.expirer.used(entry, expirationPriority) 105 | } 106 | return super.set(key, entry) 107 | } 108 | delete(key) { 109 | let oldEntry = super.get(key) 110 | if (oldEntry) { 111 | this.expirer.delete(oldEntry) 112 | } 113 | return super.delete(key) 114 | } 115 | used(entry, expirationPriority) { 116 | this.expirer.used(entry, expirationPriority) 117 | } 118 | clear() { 119 | for (let [ key, entry ] of this) { 120 | this.expirer.delete(entry) 121 | super.delete(key) 122 | } 123 | } 124 | } 125 | 126 | class NoLRUExpirer { 127 | used(entry) { 128 | if (entry.cache) 129 | entry.cache.onRemove(entry) 130 | else if (entry.deref) // if we have already registered the entry in the finalization registry, just mark it expired from the beginning 131 | entry.value = EXPIRED_ENTRY 132 | } 133 | delete(entry) { 134 | // nothing to do here, we don't have a separate cache here 135 | } 136 | } 137 | const defaultNoLRUExpirer = new NoLRUExpirer() -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "weak-lru-cache", 3 | "author": "Kris Zyp", 4 | "version": "1.2.2", 5 | "description": "An LRU cache of weak references", 6 | "license": "MIT", 7 | "types": "./index.d.ts", 8 | "keywords": [ 9 | "cache", 10 | "weak", 11 | "references", 12 | "LRU", 13 | "LRFU" 14 | ], 15 | "repository": { 16 | "type": "git", 17 | "url": "http://github.com/kriszyp/weak-lru-cache" 18 | }, 19 | "type": "module", 20 | "main": "dist/index.cjs", 21 | "module": "index.js", 22 | "exports": { 23 | ".": { 24 | "require": "./dist/index.cjs", 25 | "import": "./index.js" 26 | }, 27 | "./index.js": { 28 | "require": "./dist/index.cjs", 29 | "import": "./index.js" 30 | } 31 | }, 32 | "scripts": { 33 | "build": "rollup -c", 34 | "prepare": "rollup -c", 35 | "test": "./node_modules/.bin/mocha tests/test*.js -u tdd" 36 | }, 37 | "devDependencies": { 38 | "benchmark": "^2.1.4", 39 | "chai": "^4", 40 | "mocha": "^8", 41 | "rollup": "^1.20.3" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | export default [ 2 | { 3 | input: "index.js", 4 | output: [ 5 | { 6 | file: "dist/index.cjs", 
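// (note) this CommonJS bundle is what package.json exposes through the "require" condition
// of its "exports" map (and as "main"); the ES module source (index.js) is used for "import"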
7 | 				format: "cjs"
8 | 			}
9 | 		]
10 | 	}
11 | ];
12 |
--------------------------------------------------------------------------------
/tests/benchmark.js:
--------------------------------------------------------------------------------
1 | var inspector = require('inspector')
2 | //inspector.open(9330, null, true)
3 | var benchmark = require('benchmark')
4 | const { WeakLRUCache } = require('..')
5 | var suite = new benchmark.Suite();
6 |
7 | let cache = new WeakLRUCache()
8 | cache.loadValue = function() {
9 | 	return {}
10 | }
11 | let strongObject = cache.getValue(1)
12 |
13 | function hit() {
14 | 	let o = cache.getValue(1)
15 | }
16 | let i = 0
17 | let time = 0
18 | function miss(deferred) {
19 | 	i++
20 | 	cache.getValue(i)
21 | 	if (i % 30000 == 0) {
22 | 		let lastTime = time
23 | 		time = Date.now()
24 | 		sizes.push(cache.size, time - lastTime)
25 | 		return setImmediate(() => deferred.resolve(), 10)
26 | 	}
27 | 	if (i % 100 == 0)
28 | 		return Promise.resolve().then(() => deferred.resolve())
29 |
30 | 	deferred.resolve()
31 | }
32 | let sizes = []
33 | //suite.add('hit', hit);
34 | suite.add('miss', {
35 | 	defer: true,
36 | 	fn: miss,
37 | })
38 | suite.on('cycle', function (event) {
39 | 	console.log(String(event.target));
40 | });
41 | suite.on('complete', function () {
42 | 	console.log('Fastest is ' + this.filter('fastest').map('name'));
43 | 	console.log(JSON.stringify(sizes))
44 | });
45 |
46 | suite.run({ async: true });
47 |
--------------------------------------------------------------------------------
/tests/test.js:
--------------------------------------------------------------------------------
1 | import { WeakLRUCache } from '../index.js'
2 | import chai from 'chai'
3 | const assert = chai.assert
4 | let cache = new WeakLRUCache()
5 |
6 | suite('WeakLRUCache basic tests', function(){
7 | 	test('add entries', function(){
8 | 		let entry = cache.getValue(2)
9 | 		assert.equal(entry, undefined)
10 | 		let obj = {}
11 | 		cache.setValue(2, obj)
12 | 		assert.equal(cache.getValue(2), obj)
13 |
14 | 		if (cache.expirer.cleanup)
15 | 			cache.expirer.cleanup()
16 | 		assert.equal(cache.getValue(2), obj)
17 | 	})
18 | })
--------------------------------------------------------------------------------
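As a possible extension of `tests/test.js` (a sketch only, not a file in this repository), the pinning behavior (`expirationPriority` of `-1`) and primitive-value caching documented in the README could be exercised with the same mocha `tdd`/chai setup used above; the keys and suite name below are arbitrary:
```
import { WeakLRUCache } from '../index.js'
import chai from 'chai'
const assert = chai.assert

suite('WeakLRUCache additional sketches', function() {
	test('pinned entries survive an expirer cleanup pass', function() {
		let cache = new WeakLRUCache()
		let obj = { pinned: true }
		cache.setValue('pinned-key', obj, -1) // -1 pins the entry in memory
		if (cache.expirer.cleanup)
			cache.expirer.cleanup() // run one incremental cleanup pass on the shared expirer
		assert.equal(cache.getValue('pinned-key'), obj)
	})
	test('primitive values are cached without weak references', function() {
		let cache = new WeakLRUCache()
		cache.setValue('answer', 42)
		assert.equal(cache.getValue('answer'), 42)
	})
})
```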