-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcache.ts
More file actions
429 lines (375 loc) · 12.5 KB
/
cache.ts
File metadata and controls
429 lines (375 loc) · 12.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
/**
* @fileoverview `createTtlCache` — generic TTL-based cache built on
* top of cacache (persistent) plus an in-memory LRU memo layer.
*
* Two-tier caching: hot data lives in `memoCache` (Map<string,
* TtlCacheEntry>) capped at `memoMaxSize` entries with LRU eviction
* via Map insertion-order semantics. Persistent storage uses cacache
* so cached values survive process restarts.
*
* Key features:
* - Per-key namespacing via `prefix` so multiple caches share one
* cacache directory without conflicting.
* - `getOrFetch` deduplicates concurrent requests for the same key
* (thundering-herd protection via `inflightRequests` map).
* - Wildcard support for `getAll` / `deleteAll` (single-key methods
* throw on `*`).
* - Clock-skew detection: entries with suspiciously-far-future
* `expiresAt` are treated as expired.
*/
import { clear as cacacheClear } from '../cacache/clear'
import { safeGet as cacacheSafeGet } from '../cacache/read'
import { put as cacachePut, remove as cacacheRemove } from '../cacache/write'
import { DateNow } from '../primordials/date'
import { TypeErrorCtor } from '../primordials/error'
import { JSONParse, JSONStringify } from '../primordials/json'
import { MapCtor } from '../primordials/map-set'
import { MathMax } from '../primordials/math'
import { RegExpCtor, RegExpPrototypeTest } from '../primordials/regexp'
import {
StringPrototypeIncludes,
StringPrototypeReplaceAll,
StringPrototypeSlice,
StringPrototypeStartsWith,
} from '../primordials/string'
import type {
ClearOptions,
TtlCache,
TtlCacheEntry,
TtlCacheOptions,
} from './types'
// 5 minutes
const DEFAULT_TTL_MS = 5 * 60 * 1000
// Key namespace used when the caller does not supply `prefix`.
const DEFAULT_PREFIX = 'ttl-cache'
// Cap the in-memory memoization layer. Without this, a long-running
// daemon (devserver, editor extension) that queries many distinct keys
// accumulates entries forever — expired entries are only reclaimed when
// that exact key is read again. Cacache on disk is unaffected.
const DEFAULT_MEMO_MAX_SIZE = 1000
/**
 * Create a TTL-based cache instance.
 *
 * @example
 * ```typescript
 * const cache = createTtlCache({ ttl: 60_000, prefix: 'my-app' })
 * await cache.set('key', { value: 42 })
 * const data = await cache.get('key') // { value: 42 }
 * ```
 */
export function createTtlCache(options?: TtlCacheOptions): TtlCache {
// Merge caller options over defaults. `__proto__: null` keeps the
// merged object off Object.prototype (prototype-pollution hygiene,
// consistent with the primordials style used throughout this file).
const opts = {
__proto__: null,
memoize: true,
memoMaxSize: DEFAULT_MEMO_MAX_SIZE,
prefix: DEFAULT_PREFIX,
ttl: DEFAULT_TTL_MS,
...options,
} as Required<TtlCacheOptions>
// Validate prefix does not contain wildcards.
if (opts.prefix?.includes('*')) {
throw new TypeErrorCtor(
'Cache prefix cannot contain wildcards (*). Use clear({ prefix: "pattern*" }) for wildcard matching.',
)
}
// In-memory cache for hot data. Capped via opts.memoMaxSize using a
// Map's insertion-order semantics as the LRU list: `memoSet` deletes
// the key first so a re-insert moves it to the tail, and when size
// exceeds the cap we evict the oldest entry (first key in iteration).
const memoCache = new MapCtor<string, TtlCacheEntry<unknown>>()
// Clamp to at least 1 so the memo layer can always hold one entry.
const memoMaxSize = MathMax(1, opts.memoMaxSize ?? DEFAULT_MEMO_MAX_SIZE)
/**
 * Insert (or refresh) a memo entry, maintaining LRU order.
 *
 * Deleting an existing key before re-inserting moves it to the Map's
 * tail (most-recently-used slot); when the cache is at capacity the
 * head of the iteration order — the least-recently-used key — is
 * evicted first.
 */
function memoSet(fullKey: string, entry: TtlCacheEntry<unknown>): void {
  // The `lruKey !== undefined` guard is defensive and unreachable
  // once size >= max, hence the coverage pragma.
  /* c8 ignore start */
  const wasPresent = memoCache.delete(fullKey)
  if (!wasPresent && memoCache.size >= memoMaxSize) {
    const lruKey = memoCache.keys().next().value
    if (lruKey !== undefined) {
      memoCache.delete(lruKey)
    }
  }
  /* c8 ignore stop */
  memoCache.set(fullKey, entry)
}
// Resolve the effective TTL once up front; the nullish fallback covers
// callers that rely on the default TTL, which is the common case.
/* c8 ignore next */
const ttl = opts.ttl ?? DEFAULT_TTL_MS
/**
 * Namespace a caller-supplied key under this cache's prefix so that
 * several caches can share one cacache directory without collisions.
 */
function buildKey(key: string): string {
  return opts.prefix + ':' + key
}
/**
 * Report whether a cache entry has expired.
 *
 * Also defends against clock skew / corruption: an `expiresAt` lying
 * implausibly far in the future (beyond now + ttl + a 10s tolerance)
 * could never have been written by this cache, so it is treated as
 * expired too.
 */
function isExpired(entry: TtlCacheEntry<unknown>): boolean {
  const now = DateNow()
  // Largest expiry a legitimately-written entry could carry right now.
  const plausibleCeiling = now + ttl + 10_000
  const looksSkewed = entry.expiresAt > plausibleCeiling
  return looksSkewed || now > entry.expiresAt
}
/**
* Create a matcher function for a pattern (with wildcard support).
* Returns a function that tests if a key matches the pattern.
*/
function createMatcher(pattern: string): (key: string) => boolean {
const fullPattern = buildKey(pattern)
const hasWildcard = pattern.includes('*')
if (!hasWildcard) {
// Simple prefix matching (fast path).
return (key: string) => StringPrototypeStartsWith(key, fullPattern)
}
// Wildcard matching with regex. Anchor both ends so `foo*bar` matches
// exactly `foo<anything>bar` and not `foo<anything>bar<anything else>`.
const escaped = StringPrototypeReplaceAll(
fullPattern,
/[.+?^${}()|[\]\\]/g,
'\\$&',
)
const regexPattern = StringPrototypeReplaceAll(escaped, '*', '.*')
const regex = new RegExpCtor(`^${regexPattern}$`)
return (key: string) => RegExpPrototypeTest(regex, key)
}
/**
 * Read a value by key, consulting the in-memory memo layer first and
 * falling back to the persistent cacache store. Expired or corrupted
 * entries are evicted on the way out and reported as misses.
 *
 * @throws {TypeError} When `key` contains a `*` wildcard.
 */
async function get<T>(key: string): Promise<T | undefined> {
  if (StringPrototypeIncludes(key, '*')) {
    throw new TypeErrorCtor(
      'Cache key cannot contain wildcards (*). Use getAll(pattern) to retrieve multiple entries.',
    )
  }
  const fullKey = buildKey(key)
  // Tier 1: in-memory memo layer.
  if (opts.memoize) {
    const memoEntry = memoCache.get(fullKey)
    if (memoEntry) {
      if (!isExpired(memoEntry)) {
        // Re-insert to bump LRU recency before returning the hit.
        memoSet(fullKey, memoEntry)
        return memoEntry.data as T
      }
      // Stale memo entry — drop it and fall through to disk.
      memoCache.delete(fullKey)
    }
  }
  // Tier 2: persistent cacache store.
  const cacheEntry = await cacacheSafeGet(fullKey)
  if (!cacheEntry) {
    return undefined
  }
  let entry: TtlCacheEntry<T>
  try {
    entry = JSONParse(cacheEntry.data.toString('utf8')) as TtlCacheEntry<T>
  } catch {
    // Corrupted cache entry: evict best-effort and treat as a miss.
    try {
      await cacacheRemove(fullKey)
    } catch {
      // Ignore removal errors.
    }
    return undefined
  }
  if (!isExpired(entry)) {
    // Promote the disk hit into the memo layer for next time.
    if (opts.memoize) {
      memoSet(fullKey, entry)
    }
    return entry.data
  }
  // Expired on disk — best-effort eviction. The catch fires only when
  // the entry is already gone or the cache dir is inaccessible.
  /* c8 ignore start */
  try {
    await cacacheRemove(fullKey)
  } catch {}
  /* c8 ignore stop */
  return undefined
}
async function getAll<T>(pattern: string): Promise<Map<string, T>> {
const results = new MapCtor<string, T>()
const matches = createMatcher(pattern)
/* c8 ignore start */
if (opts.memoize) {
for (const [key, entry] of memoCache.entries()) {
if (!matches(key)) {
continue
}
if (isExpired(entry)) {
memoCache.delete(key)
continue
}
const originalKey = opts.prefix
? StringPrototypeSlice(key, opts.prefix.length + 1)
: key
results.set(originalKey, entry.data as T)
}
}
/* c8 ignore stop */
// Check persistent cache for entries not in memory.
const cacheDir = (await import('../paths/socket')).getSocketCacacheDir()
const cacacheModule = await import('../cacache/_internal')
const stream = cacacheModule.getCacache().ls.stream(cacheDir)
for await (const cacheEntry of stream) {
// Skip if doesn't match our cache prefix.
if (!cacheEntry.key.startsWith(`${opts.prefix}:`)) {
continue
}
// Skip if doesn't match pattern.
if (!matches(cacheEntry.key)) {
continue
}
// Skip if already in results (from memory).
const originalKey = opts.prefix
? cacheEntry.key.slice(opts.prefix.length + 1)
: cacheEntry.key
if (results.has(originalKey)) {
continue
}
// Get entry from cache.
try {
const entry = await cacacheSafeGet(cacheEntry.key)
if (!entry) {
continue
}
const parsed = JSONParse(
entry.data.toString('utf8'),
) as TtlCacheEntry<T>
// Skip if expired.
if (isExpired(parsed)) {
await cacacheRemove(cacheEntry.key)
continue
}
// Add to results.
results.set(originalKey, parsed.data)
// Update in-memory cache.
if (opts.memoize) {
memoSet(cacheEntry.key, parsed)
}
} catch {
// Ignore parse errors or other issues.
}
}
return results
}
/**
 * Store a value under `key` with this cache's TTL. The memo layer is
 * updated synchronously; the persistent write is best-effort and its
 * failures are swallowed (the in-memory copy remains authoritative).
 *
 * @throws {TypeError} When `key` contains a `*` wildcard.
 */
async function set<T>(key: string, data: T): Promise<void> {
  if (StringPrototypeIncludes(key, '*')) {
    throw new TypeErrorCtor(
      'Cache key cannot contain wildcards (*). Wildcards are only supported in clear({ prefix: "pattern*" }).',
    )
  }
  const fullKey = buildKey(key)
  const expiresAt = DateNow() + ttl
  const entry: TtlCacheEntry<T> = { data, expiresAt }
  // Update in-memory cache first (synchronous and fast).
  if (opts.memoize) {
    memoSet(fullKey, entry)
  }
  // Update persistent cache (don't fail if this errors).
  try {
    await cacachePut(fullKey, JSONStringify(entry), {
      metadata: { expiresAt },
    })
  } catch {
    // Ignore persistent cache errors - in-memory cache is the source of truth.
  }
}
// Track in-flight fetch requests to prevent duplicate fetches
// (thundering-herd protection: concurrent callers for one key share a
// single fetcher invocation).
const inflightRequests = new MapCtor<string, Promise<unknown>>()
/**
 * Return the cached value for `key`, or invoke `fetcher`, cache its
 * result via `set`, and return it. Concurrent calls for the same key
 * join the same in-flight fetch rather than fetching again.
 */
async function getOrFetch<T>(
key: string,
fetcher: () => Promise<T>,
): Promise<T> {
const fullKey = buildKey(key)
// Fast path: join a fetch that is already in flight.
/* c8 ignore start */
const preexisting = inflightRequests.get(fullKey)
if (preexisting) {
return (await preexisting) as T
}
/* c8 ignore stop */
const cached = await get<T>(key)
if (cached !== undefined) {
return cached
}
// Re-check after awaiting get(): another caller may have registered a
// fetch while this one was suspended on the event loop.
/* c8 ignore start */
const rechecked = inflightRequests.get(fullKey)
if (rechecked) {
return (await rechecked) as T
}
/* c8 ignore stop */
// Create promise with cleanup handlers
const promise = (async () => {
try {
const data = await fetcher()
await set(key, data)
return data
} finally {
// Clean up on both success and error
inflightRequests.delete(fullKey)
}
})()
// Register before awaiting so subsequent callers join this fetch.
inflightRequests.set(fullKey, promise)
// Await and return (cleanup happens in finally block)
return await promise
}
/**
 * Remove a single entry from both cache tiers. A missing entry or an
 * inaccessible cache directory is not treated as an error.
 *
 * @throws {TypeError} When `key` contains a `*` wildcard.
 */
async function deleteEntry(key: string): Promise<void> {
  if (StringPrototypeIncludes(key, '*')) {
    throw new TypeErrorCtor(
      'Cache key cannot contain wildcards (*). Use deleteAll(pattern) to remove multiple entries.',
    )
  }
  const fullKey = buildKey(key)
  // Drop the memo copy unconditionally, then best-effort the disk copy.
  memoCache.delete(fullKey)
  try {
    await cacacheRemove(fullKey)
  } catch {
    // Ignore removal errors - entry may not exist or cache may be inaccessible.
  }
}
/**
 * Remove every entry matching the optional pattern (wildcards
 * allowed); with no pattern, remove everything under this cache's
 * prefix. Returns the count of persistent entries removed.
 */
async function deleteAll(pattern?: string | undefined): Promise<number> {
  // Combine the cache prefix with the optional caller pattern.
  const fullPrefix = pattern ? `${opts.prefix}:${pattern}` : `${opts.prefix}:`
  // Purge matching in-memory entries.
  if (pattern) {
    const matches = createMatcher(pattern)
    for (const key of memoCache.keys()) {
      if (matches(key)) {
        memoCache.delete(key)
      }
    }
  } else {
    memoCache.clear()
  }
  // Purge matching persistent entries and report how many went away.
  const removed = await cacacheClear({ prefix: fullPrefix })
  return (removed ?? 0) as number
}
/**
 * Clear the cache. With `memoOnly` set, only the in-memory layer is
 * dropped; otherwise the persistent store is purged as well.
 */
async function clear(options?: ClearOptions | undefined): Promise<void> {
  // Named `clearOpts` rather than `opts`: the original shadowed the
  // factory-level `opts`, which lints dirty and invites subtle bugs.
  const clearOpts = { __proto__: null, ...options } as ClearOptions
  // Clear in-memory cache.
  memoCache.clear()
  // If memoOnly, stop here.
  if (clearOpts.memoOnly) {
    return
  }
  // Clear persistent cache.
  await deleteAll()
}
// Public cache surface. `delete` is a reserved-looking method name, so
// the implementation lives in `deleteEntry` and is aliased here.
return {
clear,
delete: deleteEntry,
deleteAll,
get,
getAll,
getOrFetch,
set,
}
}