Skip to content

Commit 3e6c377

Browse files
committed
Rewrite defaultMemoize to use an LRU cache with configurable size
Implementation based on Erik Rasmussen's `lru-memoize`: https://github.com/erikras/lru-memoize
1 parent c2b6b80 commit 3e6c377

File tree

2 files changed

+161
-25
lines changed

2 files changed

+161
-25
lines changed

src/defaultMemoize.ts

Lines changed: 102 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,98 @@
11
import type { EqualityFn } from './types'
22

3-
export const defaultEqualityCheck: EqualityFn = (a, b): boolean => {
4-
return a === b
3+
// Cache implementation based on Erik Rasmussen's `lru-memoize`:
4+
// https://github.com/erikras/lru-memoize
5+
6+
interface Entry {
7+
key: any
8+
value: any
9+
}
10+
11+
interface Cache {
12+
get(key: any): Entry | undefined
13+
put(key: any, value: any): void
514
}
615

7-
function areArgumentsShallowlyEqual(
8-
equalityCheck: EqualityFn,
9-
prev: unknown[] | IArguments | null,
10-
next: unknown[] | IArguments | null
11-
): boolean {
12-
if (prev === null || next === null || prev.length !== next.length) {
13-
return false
16+
function createSingletonCache(equals: EqualityFn): Cache {
17+
let entry: Entry
18+
return {
19+
get(key: any) {
20+
if (entry && equals(entry.key, key)) {
21+
return entry.value
22+
}
23+
},
24+
25+
put(key: any, value: any) {
26+
entry = { key, value }
27+
}
1428
}
29+
}
1530

16-
// Do this in a for loop (and not a `forEach` or an `every`) so we can determine equality as fast as possible.
17-
const length = prev.length
18-
for (let i = 0; i < length; i++) {
19-
if (!equalityCheck(prev[i], next[i])) {
20-
return false
31+
function createLruCache(maxSize: number, equals: EqualityFn): Cache {
32+
const entries: Entry[] = []
33+
34+
function get(key: any) {
35+
const cacheIndex = entries.findIndex(entry => equals(key, entry.key))
36+
37+
// We found a cached entry
38+
if (cacheIndex > -1) {
39+
const entry = entries[cacheIndex]
40+
41+
// Cached entry not at top of cache, move it to the top
42+
if (cacheIndex > 0) {
43+
entries.splice(cacheIndex, 1)
44+
entries.unshift(entry)
45+
}
46+
47+
return entry.value
48+
}
49+
50+
// No entry found in cache, return null
51+
return undefined
52+
}
53+
54+
function put(key: any, value: any) {
55+
if (!get(key)) {
56+
// TODO Is unshift slow?
57+
entries.unshift({ key, value })
58+
if (entries.length > maxSize) {
59+
entries.pop()
60+
}
2161
}
2262
}
2363

24-
return true
64+
return { get, put }
65+
}
66+
67+
export const defaultEqualityCheck: EqualityFn = (a, b): boolean => {
68+
return a === b
69+
}
70+
71+
function createCacheKeyComparator(equalityCheck: EqualityFn) {
72+
return function areArgumentsShallowlyEqual(
73+
prev: unknown[] | IArguments | null,
74+
next: unknown[] | IArguments | null
75+
): boolean {
76+
if (prev === null || next === null || prev.length !== next.length) {
77+
return false
78+
}
79+
80+
// Do this in a for loop (and not a `forEach` or an `every`) so we can determine equality as fast as possible.
81+
const length = prev.length
82+
for (let i = 0; i < length; i++) {
83+
if (!equalityCheck(prev[i], next[i])) {
84+
return false
85+
}
86+
}
87+
88+
return true
89+
}
2590
}
2691

2792
/**
 * Options accepted by `defaultMemoize`.
 */
export interface DefaultMemoizeOptions {
  // Compares each incoming argument against the corresponding cached
  // argument. Defaults to `defaultEqualityCheck` (strict `===`).
  equalityCheck?: EqualityFn
  // Intended to compare a freshly computed result against a previous one.
  // NOTE(review): read by `defaultMemoize` in this revision but not applied
  // anywhere in the returned function — confirm intended behavior.
  resultEqualityCheck?: EqualityFn
  // Maximum number of argument sets kept in the cache. Defaults to 1
  // (singleton cache); values > 1 switch to the LRU cache.
  maxSize?: number
}
3097

3198
// defaultMemoize now supports a configurable cache size and comparison of the result value.
@@ -34,25 +101,35 @@ export function defaultMemoize<F extends (...args: any[]) => any>(
34101
func: F,
35102
equalityCheckOrOptions?: EqualityFn | DefaultMemoizeOptions
36103
): F {
37-
let lastArgs: any = null
38-
let lastResult: any = null
39-
40104
const providedOptions =
41105
typeof equalityCheckOrOptions === 'object'
42106
? equalityCheckOrOptions
43107
: { equalityCheck: equalityCheckOrOptions }
44108

45-
const { equalityCheck = defaultEqualityCheck } = providedOptions
109+
const {
110+
equalityCheck = defaultEqualityCheck,
111+
maxSize = 1,
112+
resultEqualityCheck
113+
} = providedOptions
114+
115+
const comparator = createCacheKeyComparator(equalityCheck)
116+
let resultComparator = resultEqualityCheck
117+
? createCacheKeyComparator(resultEqualityCheck)
118+
: undefined
119+
120+
const cache =
121+
maxSize === 1
122+
? createSingletonCache(comparator)
123+
: createLruCache(maxSize, comparator)
46124

47125
// we reference arguments instead of spreading them for performance reasons
48126
return function () {
49-
if (!areArgumentsShallowlyEqual(equalityCheck, lastArgs, arguments)) {
50-
// apply arguments instead of spreading for performance.
127+
let value = cache.get(arguments)
128+
if (value === undefined) {
51129
// @ts-ignore
52-
lastResult = func.apply(null, arguments)
130+
value = func.apply(null, arguments)
131+
cache.put(arguments, value)
53132
}
54-
55-
lastArgs = arguments
56-
return lastResult
133+
return value
57134
} as F
58135
}

test/test_selector.ts

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -447,13 +447,72 @@ describe('defaultMemoize', () => {
447447
// call with different object does shallow compare
448448
expect(fallthroughs).toBe(1)
449449

450+
/*
451+
This test was useful when we had a cache size of 1 previously, and always saved `lastArgs`.
452+
But, with the new implementation, this doesn't make sense any more.
453+
454+
450455
// the third call does not fall through because `defaultMemoize` passes `anotherObject` as
451456
// both the `newVal` and `oldVal` params. This allows `shallowEqual` to be much more performant
452457
// than if it had passed `someObject` as `oldVal`, even though `someObject` and `anotherObject`
453458
// are shallowly equal
454459
memoized(anotherObject)
455460
// call with same object as previous call does not shallow compare
456461
expect(fallthroughs).toBe(1)
462+
463+
*/
464+
})
465+
466+
test('Accepts a max size greater than 1 with LRU cache behavior', () => {
467+
let funcCalls = 0
468+
469+
const memoizer = defaultMemoize(
470+
(state: any) => {
471+
funcCalls++
472+
return state
473+
},
474+
{
475+
maxSize: 3
476+
}
477+
)
478+
479+
// Initial call
480+
memoizer('a') // ['a']
481+
expect(funcCalls).toBe(1)
482+
483+
// In cache - memoized
484+
memoizer('a') // ['a']
485+
expect(funcCalls).toBe(1)
486+
487+
// Added
488+
memoizer('b') // ['b', 'a']
489+
expect(funcCalls).toBe(2)
490+
491+
// Added
492+
memoizer('c') // ['c', 'b', 'a']
493+
expect(funcCalls).toBe(3)
494+
495+
// Added, removes 'a'
496+
memoizer('d') // ['d', 'c', 'b']
497+
expect(funcCalls).toBe(4)
498+
499+
// No longer in cache, re-added, removes 'b'
500+
memoizer('a') // ['a', 'd', 'c']
501+
expect(funcCalls).toBe(5)
502+
503+
// In cache, moved to front
504+
memoizer('c') // ['c', 'a', 'd']
505+
expect(funcCalls).toBe(5)
506+
507+
// Added, removes 'd'
508+
memoizer('e') // ['e', 'c', 'a']
509+
expect(funcCalls).toBe(6)
510+
511+
// No longer in cache, re-added, removes 'a'
512+
memoizer('d') // ['d', 'e', 'c']
513+
expect(funcCalls).toBe(7)
514+
})
515+
457516
})
458517

459518
test('Accepts an options object as an arg', () => {

0 commit comments

Comments
 (0)