fix: Incorrect default value of maxEntries for MemoryCache #904 (#906)
* Fixed: Incorrect default value of maxEntries for MemoryCache #904

* Added: more test coverage for internal cache
Fixed: do not remove items from cache upon updating an existing key.

---------

Co-authored-by: Daan <>
daan944 authored Nov 11, 2024
1 parent 7d004cb commit f7abe3f
Showing 2 changed files with 101 additions and 3 deletions.
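
The first fix concerns the default value. In JavaScript, ^ is bitwise XOR and - binds more tightly, so the old default 2 ^ 24 - 1 evaluated to 2 ^ 23, which is 21, not the intended 16777215 (2 ** 24 - 1, matching the new test below). A quick sanity check of the two expressions in Node.js:

// '^' is bitwise XOR, so the old expression was really 2 XOR 23.
console.log(2 ^ 24 - 1);  // 21
console.log(2 ** 24 - 1); // 16777215 — the intended default for maxEntries
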
6 changes: 3 additions & 3 deletions lib/cache.js
@@ -6,7 +6,7 @@
class MemoryCache {
constructor (maxEntries) {
this.cache = new Map();
-    this.maxEntries = maxEntries ?? 2 ^ 24 - 1; // Max size for Map is 2 ^ 24.
+    this.maxEntries = maxEntries ?? 2 ** 24 - 1; // Max size for Map is 2^24.
}

/**
@@ -33,8 +33,8 @@ class MemoryCache {
* @return {void}
*/
set (key, value, ttl) {
-    // Evict oldest entry when at capacity.
-    if (this.cache.size === this.maxEntries) {
+    // Evict first entry when at capacity - only when it's a new key.
+    if (this.cache.size === this.maxEntries && this.get(key) === undefined) {
this.cache.delete(this.cache.keys().next().value);
}

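The second change guards eviction with this.get(key) === undefined, so the first (oldest-inserted) entry is only dropped when the incoming key is actually new; re-setting a key that is already cached cannot grow the Map and should evict nothing. A minimal sketch of the fixed behavior, assuming the module is required the same way the tests below require it:

const MemoryCache = require('../lib/cache');

const c = new MemoryCache(2); // capacity of two entries
c.set('a', 1, 0);
c.set('b', 2, 0);

c.set('b', 99, 0);            // updating an existing key while at capacity
console.log(c.get('a'));      // 1 — 'a' is no longer evicted by the update
console.log(c.get('b'));      // 99
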
98 changes: 98 additions & 0 deletions test/cache.js
@@ -2,6 +2,7 @@

const test = require('tape');
const CircuitBreaker = require('../');
const MemoryCache = require('../lib/cache');
const common = require('./common');
const passFail = common.passFail;
const expected = 34;
@@ -312,3 +313,100 @@ test('Using cache with custom transport', t => {
.then(t.end)
.catch(t.fail);
});

// Testing cache internal features.
test('Default max size of cache.', t => {
t.plan(1);
const c = new MemoryCache();
t.equal(c.maxEntries, 16777215, 'Max size of cache is limited to max size of Map to prevent mem leaks.');
t.end();
});

test('Enforcement of cache max size.', t => {
t.plan(26);

const maxEntries = 10;
const c = new MemoryCache(maxEntries);
t.equal(c.maxEntries, maxEntries, 'Max size is given and respected.');

// note: 1 based index to match size.
for (let i = 1; i <= maxEntries; i++) {
const key = `key${i}`;
c.set(key, i, 0);
t.equal(c.cache.size, i, `${i} entry/ies added, so size is ${i}.`);
t.equal(c.get(key), i, `${key} stored successfully`);
}

// next entry should purge oldest, as maxEntries is hit.
c.set('new entry', 'someval');
t.equal(
c.cache.size,
maxEntries,
`Max size is respected, cache size does not grow over ${maxEntries}.`
);
t.equal(c.get('key1'), undefined, 'Oldest key (key1) is removed');

// update oldest entry.
c.set('key2', 'updated');
c.set('another new entry', 'someval');
t.equal(
c.cache.size,
maxEntries,
`Max size is respected, cache size does not grow over ${maxEntries}.`
);

// And notice Map iterator behavior.
t.equal(c.get('key2'), undefined, 'key2 is purged, as it was oldest. Updates do not count in Maps.');
t.equal(c.get('key3'), 3, 'key3 can be considered older, but is not purged.');

t.end();
});
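
The assertions in 'Enforcement of cache max size.' lean on a documented property of JavaScript's Map: iteration follows insertion order, and setting an existing key again updates its value without moving it to the end. A standalone illustration of that behavior:

const m = new Map([['key2', 2], ['key3', 3]]);
m.set('key2', 'updated');             // value changes, insertion position does not
console.log(m.keys().next().value);   // 'key2' — still the first (oldest) key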

test('Flush cache.', t => {
t.plan(22);

const maxEntries = 10;
const c = new MemoryCache(maxEntries);

// note: 1 based index to match size.
for (let i = 1; i <= maxEntries; i++) {
const key = `key${i}`;
c.set(key, i, 0);
t.equal(c.cache.size, i, `${i} entry/ies added, so size is ${i}.`);
t.equal(c.get(key), i, `${key} stored successfully`);
}

c.flush();
t.equal(c.cache.size, 0, 'cache flushed successfully');
t.equal(c.get('key5'), undefined, 'all cache entries return undefined');

t.end();
});

test('TTL is checked and entry removed when expired.', t => {
t.plan(8);

const c = new MemoryCache();
const ok = 'cached string value';

c.set('100ms ttl', ok, Date.now() + 100);
c.set('200ms ttl', ok, Date.now() + 200);
c.set('no ttl', ok, 0);

t.equal(c.cache.size, 3, '3 items stored');
t.deepEqual([c.get('100ms ttl'), c.get('200ms ttl'), c.get('no ttl')], [ok, ok, ok], '3 items can be retrieved');

setTimeout(() => {
t.equal(c.get('100ms ttl'), undefined, 'TTL expired, so cache returns undefined.');
t.equal(c.cache.size, 2, 'Upon fetching expired item, it is removed from cache');
t.deepEqual([c.get('200ms ttl'), c.get('no ttl')], [ok, ok], 'Entries that have not yet expired are ok');

setTimeout(() => {
t.equal(c.get('200ms ttl'), undefined, 'TTL expired, so cache returns undefined.');
t.equal(c.cache.size, 1, 'Upon fetching expired item, it is removed from cache');
t.equal(c.get('no ttl'), ok, 'Entries that have not yet expired are ok');

t.end();
}, 200);
}, 101);
});
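
Note that the TTL argument in these tests is an absolute expiry timestamp (Date.now() + 100), with 0 meaning the entry never expires; expired entries are removed lazily, when get() touches them. A small usage sketch under that reading of the API:

const MemoryCache = require('../lib/cache');

const c = new MemoryCache();
c.set('session', 'abc123', Date.now() + 5000); // expires five seconds from now
c.set('config', { retries: 3 }, 0);            // 0: never expires

setTimeout(() => {
  console.log(c.get('session'));               // undefined — expired, removed on read
  console.log(c.get('config'));                // { retries: 3 }
}, 6000);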
