source: node_modules/reselect/src/lruMemoize.ts @ d24f17c

import type {
  AnyFunction,
  DefaultMemoizeFields,
  EqualityFn,
  Simplify
} from './types'

import type { NOT_FOUND_TYPE } from './utils'
import { NOT_FOUND } from './utils'

// Cache implementation based on Erik Rasmussen's `lru-memoize`:
// https://github.com/erikras/lru-memoize

interface Entry {
  key: unknown
  value: unknown
}

interface Cache {
  get(key: unknown): unknown | NOT_FOUND_TYPE
  put(key: unknown, value: unknown): void
  getEntries(): Entry[]
  clear(): void
}

function createSingletonCache(equals: EqualityFn): Cache {
  let entry: Entry | undefined
  return {
    get(key: unknown) {
      if (entry && equals(entry.key, key)) {
        return entry.value
      }

      return NOT_FOUND
    },

    put(key: unknown, value: unknown) {
      entry = { key, value }
    },

    getEntries() {
      return entry ? [entry] : []
    },

    clear() {
      entry = undefined
    }
  }
}
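
/*
 * Illustrative sketch of how the singleton cache above behaves: it stores
 * exactly one entry, so a `put` with a new key replaces whatever was cached
 * before. The string keys here are only for illustration; the real cache
 * keys are `arguments` objects.
 *
 * ```ts
 * const cache = createSingletonCache((a, b) => a === b)
 * cache.put('a', 1)
 * cache.get('a')    // 1
 * cache.put('b', 2) // replaces the single stored entry
 * cache.get('a')    // NOT_FOUND sentinel
 * ```
 */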

function createLruCache(maxSize: number, equals: EqualityFn): Cache {
  let entries: Entry[] = []

  function get(key: unknown) {
    const cacheIndex = entries.findIndex(entry => equals(key, entry.key))

    // We found a cached entry
    if (cacheIndex > -1) {
      const entry = entries[cacheIndex]

      // Cached entry not at top of cache, move it to the top
      if (cacheIndex > 0) {
        entries.splice(cacheIndex, 1)
        entries.unshift(entry)
      }

      return entry.value
    }

    // No entry found in cache, return sentinel
    return NOT_FOUND
  }

  function put(key: unknown, value: unknown) {
    if (get(key) === NOT_FOUND) {
      // TODO Is unshift slow?
      entries.unshift({ key, value })
      if (entries.length > maxSize) {
        entries.pop()
      }
    }
  }

  function getEntries() {
    return entries
  }

  function clear() {
    entries = []
  }

  return { get, put, getEntries, clear }
}
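
/*
 * Illustrative sketch of the LRU behavior above: a cache hit moves the entry
 * to the front of the list, and inserting beyond `maxSize` evicts the least
 * recently used entry from the back.
 *
 * ```ts
 * const lru = createLruCache(2, (a, b) => a === b)
 * lru.put('a', 1) // order: ['a']
 * lru.put('b', 2) // order: ['b', 'a']
 * lru.get('a')    // 1, order becomes ['a', 'b']
 * lru.put('c', 3) // evicts 'b', order: ['c', 'a']
 * lru.get('b')    // NOT_FOUND sentinel
 * ```
 */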

/**
 * Runs a simple reference equality check.
 * What {@linkcode lruMemoize lruMemoize} uses by default.
 *
 * **Note**: This function was previously known as `defaultEqualityCheck`.
 *
 * @public
 */
export const referenceEqualityCheck: EqualityFn = (a, b) => a === b
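
/*
 * Quick sketch: reference equality only checks identity, so structurally
 * identical values with different references do not match.
 *
 * ```ts
 * referenceEqualityCheck(1, 1)               // true
 * referenceEqualityCheck({ a: 1 }, { a: 1 }) // false (different references)
 * ```
 */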

export function createCacheKeyComparator(equalityCheck: EqualityFn) {
  return function areArgumentsShallowlyEqual(
    prev: unknown[] | IArguments | null,
    next: unknown[] | IArguments | null
  ): boolean {
    if (prev === null || next === null || prev.length !== next.length) {
      return false
    }

    // Do this in a for loop (and not a `forEach` or an `every`) so we can determine equality as fast as possible.
    const { length } = prev
    for (let i = 0; i < length; i++) {
      if (!equalityCheck(prev[i], next[i])) {
        return false
      }
    }

    return true
  }
}
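
/*
 * Illustrative sketch: the comparator produced above treats two argument
 * lists as equal only when their lengths match and every position passes
 * the element-level equality check.
 *
 * ```ts
 * const comparator = createCacheKeyComparator(referenceEqualityCheck)
 * const todos = [{ id: 1 }]
 * comparator([todos, 5], [todos, 5])      // true
 * comparator([todos, 5], [[...todos], 5]) // false: new array reference
 * comparator([todos], [todos, 5])         // false: different lengths
 * ```
 */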

/**
 * Options for configuring the behavior of a function memoized with
 * LRU (Least Recently Used) caching.
 *
 * @template Result - The type of the return value of the memoized function.
 *
 * @public
 */
export interface LruMemoizeOptions<Result = any> {
  /**
   * Function used to compare the individual arguments of the
   * provided calculation function.
   *
   * @default referenceEqualityCheck
   */
  equalityCheck?: EqualityFn

  /**
   * If provided, used to compare a newly generated output value against
   * previous values in the cache. If a match is found,
   * the old value is returned. This addresses the common
   * ```ts
   * todos.map(todo => todo.id)
   * ```
   * use case, where an update to another field in the original data causes
   * a recalculation due to changed references, but the output is still
   * effectively the same.
   *
   * @since 4.1.0
   */
  resultEqualityCheck?: EqualityFn<Result>

  /**
   * The maximum size of the cache used by the selector.
   * A size greater than 1 means the selector will use an
   * LRU (Least Recently Used) cache, allowing for the caching of multiple
   * results based on different sets of arguments.
   *
   * @default 1
   */
  maxSize?: number
}
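
/*
 * Illustrative options object for the `todos.map(todo => todo.id)` case
 * described above. `shallowArrayEquals` is a hypothetical helper used only
 * for this sketch: when a recomputation produces an array with the same
 * elements, `resultEqualityCheck` lets the memoized function keep returning
 * the previously cached array reference.
 *
 * ```ts
 * const shallowArrayEquals = (a: unknown[], b: unknown[]) =>
 *   a.length === b.length && a.every((item, index) => item === b[index])
 *
 * const options: LruMemoizeOptions<number[]> = {
 *   equalityCheck: referenceEqualityCheck,
 *   resultEqualityCheck: shallowArrayEquals,
 *   maxSize: 5
 * }
 * ```
 */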

/**
 * Creates a memoized version of a function with an optional
 * LRU (Least Recently Used) cache. The memoized function uses a cache to
 * store computed values. Depending on the `maxSize` option, it will use
 * either a singleton cache (for a single entry) or an
 * LRU cache (for multiple entries).
 *
 * **Note**: This function was previously known as `defaultMemoize`.
 *
 * @param func - The function to be memoized.
 * @param equalityCheckOrOptions - Either an equality check function or an options object.
 * @returns A memoized function with a `.clearCache()` method attached.
 *
 * @template Func - The type of the function that is memoized.
 *
 * @see {@link https://reselect.js.org/api/lruMemoize `lruMemoize`}
 *
 * @public
 */
export function lruMemoize<Func extends AnyFunction>(
  func: Func,
  equalityCheckOrOptions?: EqualityFn | LruMemoizeOptions<ReturnType<Func>>
) {
  const providedOptions =
    typeof equalityCheckOrOptions === 'object'
      ? equalityCheckOrOptions
      : { equalityCheck: equalityCheckOrOptions }

  const {
    equalityCheck = referenceEqualityCheck,
    maxSize = 1,
    resultEqualityCheck
  } = providedOptions

  const comparator = createCacheKeyComparator(equalityCheck)

  let resultsCount = 0

  const cache =
    maxSize === 1
      ? createSingletonCache(comparator)
      : createLruCache(maxSize, comparator)

  function memoized() {
    let value = cache.get(arguments) as ReturnType<Func>
    if (value === NOT_FOUND) {
      // apply arguments instead of spreading for performance.
      // @ts-ignore
      value = func.apply(null, arguments) as ReturnType<Func>
      resultsCount++

      if (resultEqualityCheck) {
        const entries = cache.getEntries()
        const matchingEntry = entries.find(entry =>
          resultEqualityCheck(entry.value as ReturnType<Func>, value)
        )

        if (matchingEntry) {
          value = matchingEntry.value as ReturnType<Func>
          resultsCount !== 0 && resultsCount--
        }
      }

      cache.put(arguments, value)
    }
    return value
  }

  memoized.clearCache = () => {
    cache.clear()
    memoized.resetResultsCount()
  }

  memoized.resultsCount = () => resultsCount

  memoized.resetResultsCount = () => {
    resultsCount = 0
  }

  return memoized as Func & Simplify<DefaultMemoizeFields>
}
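
/*
 * Illustrative usage sketch (hypothetical `addHeavy` function): memoizing a
 * plain function directly with `lruMemoize`. The default `maxSize` of 1 only
 * remembers the latest arguments; `maxSize: 3` keeps the three most recent
 * argument combinations. `clearCache`, `resultsCount` and `resetResultsCount`
 * are the fields attached above.
 *
 * ```ts
 * const addHeavy = (a: number, b: number) => a + b // stand-in for an expensive calculation
 *
 * const memoizedAdd = lruMemoize(addHeavy, { maxSize: 3 })
 *
 * memoizedAdd(1, 2) // computed, returns 3
 * memoizedAdd(1, 2) // cache hit, returns 3
 * memoizedAdd(2, 3) // computed, returns 5
 * memoizedAdd.resultsCount() // 2
 * memoizedAdd.clearCache()   // resets both the cache and the counter
 * ```
 */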