forked from isaacs/node-lru-cache
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathindex.d.ts
More file actions
207 lines (176 loc) · 6.47 KB
/
index.d.ts
File metadata and controls
207 lines (176 loc) · 6.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
// Type definitions for lru-cache 5.1
// Project: https://github.com/isaacs/node-lru-cache
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
// BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
declare class LRUCache<K, V> {
  /** Create a cache configured by an options object (all fields optional). */
  constructor(options?: LRUCache.Options<K, V>)
  /** Shorthand for constructing with only a maximum size. */
  constructor(max: number)
  /**
   * Return total length of objects in cache taking into account the
   * `length` options function (each entry counts as 1 if none was given).
   */
  readonly length: number
  /**
   * Return total quantity of objects currently in cache. Note,
   * that `stale` (see options) items are returned as part of this item count.
   */
  readonly itemCount: number
  /**
   * Same as the `stale` constructor option (exposed here under the
   * name `allowStale`). Setting it changes stale-return behavior live.
   */
  allowStale: boolean
  /**
   * Same as Options.length. Setting a new calculator re-evaluates the
   * length of every cached entry.
   */
  lengthCalculator(value: V): number
  /**
   * Same as Options.max. Resizes the cache when the `max` changes.
   */
  max: number
  /**
   * Same as Options.maxAge. Resizes the cache when the `maxAge` changes.
   */
  maxAge: number
  /**
   * Store a value, updating its "recently used"-ness.
   * `maxAge` is optional and overrides the cache `maxAge` option if provided.
   *
   * Returns `false` when the item could not be stored (e.g. it is
   * larger than the cache `max` by itself).
   */
  set(key: K, value: V, maxAge?: number): boolean
  /**
   * Retrieve a value, updating its "recently used"-ness.
   * `maxAge` is optional and overrides the cache `maxAge` option if provided.
   *
   * If the key is not found, will return `undefined`.
   */
  get(key: K): V | undefined
  /**
   * Returns the key value (or `undefined` if not found) without updating
   * the "recently used"-ness of the key.
   *
   * (If you find yourself using this a lot, you might be using the wrong
   * sort of data structure, but there are some use cases where it's handy.)
   */
  peek(key: K): V | undefined
  /**
   * Check if a key is in the cache, without updating the recent-ness
   * or deleting it for being stale.
   */
  has(key: K): boolean
  /**
   * Deletes a key out of the cache.
   */
  del(key: K): void
  /**
   * Clear the cache entirely, throwing away all values.
   */
  reset(): void
  /**
   * Manually iterates over the entire cache proactively pruning old entries.
   */
  prune(): void
  /**
   * Just like `Array.prototype.forEach`. Iterates over all the keys in the cache,
   * in order of recent-ness. (Ie, more recently used items are iterated over first.)
   */
  forEach<T = this>(callbackFn: (this: T, value: V, key: K, cache: this) => void, thisArg?: T): void
  /**
   * The same as `cache.forEach(...)` but items are iterated over in reverse order.
   * (ie, less recently used items are iterated over first.)
   */
  rforEach<T = this>(callbackFn: (this: T, value: V, key: K, cache: this) => void, thisArg?: T): void
  /**
   * Return an array of the keys in the cache, most recently used first.
   */
  keys(): K[]
  /**
   * Return an array of the values in the cache, most recently used first.
   */
  values(): V[]
  /**
   * Return an array of the cache entries ready for serialization and usage
   * with `destinationCache.load(arr)`.
   */
  dump(): Array<LRUCache.Entry<K, V>>
  /**
   * Loads another cache entries array, obtained with `sourceCache.dump()`,
   * into the cache. The destination cache is reset before loading new entries.
   *
   * @param cacheEntries Obtained from `sourceCache.dump()`
   */
  load(cacheEntries: ReadonlyArray<LRUCache.Entry<K, V>>): void
  /**
   * Manually trigger trimming the cache down to `max`.
   * This is only necessary when the `autoTrim` option is `false`.
   *
   * NOTE(review): declared optional — presumably absent on builds without
   * the `autoTrim` fork extension; confirm against the implementation.
   */
  trim?(): void
}
declare namespace LRUCache {
  /** Constructor options for {@link LRUCache}. */
  interface Options<K, V> {
    /**
     * The maximum size of the cache, checked by applying the length
     * function to all values in the cache. Not setting this is kind of silly,
     * since that's the whole purpose of this lib, but it defaults to `Infinity`.
     */
    max?: number
    /**
     * Maximum age in ms. Items are not pro-actively pruned out as they age,
     * but if you try to get an item that is too old, it'll drop it and return
     * undefined instead of giving it to you.
     */
    maxAge?: number
    /**
     * Function that is used to calculate the length of stored items.
     * If you're storing strings or buffers, then you probably want to do
     * something like `function(n, key){return n.length}`. The default
     * is `function(){return 1}`, which is fine if you want to store
     * `max` like-sized things. The item is passed as the first argument,
     * and the key is passed as the second argument.
     */
    length?(value: V, key?: K): number
    /**
     * Function that is called on items when they are dropped from the cache.
     * This can be handy if you want to close file descriptors or do other
     * cleanup tasks when items are no longer accessible. Called with `key, value`.
     * It's called before actually removing the item from the internal cache,
     * so if you want to immediately put it back in, you'll have to do that in
     * a `nextTick` or `setTimeout` callback or it won't do anything.
     */
    dispose?(key: K, value: V): void
    /**
     * By default, if you set a `maxAge`, it'll only actually pull stale items
     * out of the cache when you `get(key)`. (That is, it's not pre-emptively
     * doing a `setTimeout` or anything.) If you set `stale:true`, it'll return
     * the stale value before deleting it. If you don't set this, then it'll
     * return `undefined` when you try to get a stale entry,
     * as if it had already been deleted.
     *
     * (Exposed on cache instances as the `allowStale` property.)
     */
    stale?: boolean
    /**
     * By default, if you set a `dispose()` method, then it'll be called whenever
     * a `set()` operation overwrites an existing key. If you set this option,
     * `dispose()` will only be called when a key falls out of the cache,
     * not when it is overwritten.
     */
    noDisposeOnSet?: boolean
    /**
     * When using time-expiring entries with `maxAge`, setting this to `true` will make each
     * item's effective time update to the current time whenever it is retrieved from cache,
     * causing it to not expire. (It can still fall out of cache based on recency of use, of
     * course.)
     */
    updateAgeOnGet?: boolean
    /**
     * By default, the cache will automatically be trimmed to respect
     * the `max` length whenever it is mutated. Setting `autoTrim` to `false`
     * will prevent this behavior, and only be trimmed when the `trim` method
     * is called manually.
     */
    autoTrim?: boolean
  }
  /**
   * Serialized cache entry, as produced by `cache.dump()` and consumed
   * by `cache.load()`. Field names are abbreviated in the wire format.
   */
  interface Entry<K, V> {
    /** The entry's key. */
    k: K
    /** The entry's value. */
    v: V
    /**
     * NOTE(review): presumably the entry's expiration timestamp in epoch
     * milliseconds — confirm against the `dump()` implementation.
     */
    e: number
  }
}
export = LRUCache