diff --git a/node_modules/lru-cache/index.d.ts b/node_modules/lru-cache/index.d.ts
new file mode 100644
index 0000000000000..b9375a8b96a71
--- /dev/null
+++ b/node_modules/lru-cache/index.d.ts
@@ -0,0 +1,593 @@
+// Type definitions for lru-cache 7.10.0
+// Project: https://github.com/isaacs/node-lru-cache
+// Based initially on @types/lru-cache
+// https://github.com/DefinitelyTyped/DefinitelyTyped
+// used under the terms of the MIT License, shown below.
+//
+// DefinitelyTyped license:
+// ------
+// MIT License
+//
+// Copyright (c) Microsoft Corporation.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the "Software"),
+// to deal in the Software without restriction, including without limitation
+// the rights to use, copy, modify, merge, publish, distribute, sublicense,
+// and/or sell copies of the Software, and to permit persons to whom the
+// Software is furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
+// ------
+//
+// Changes by Isaac Z. Schlueter released under the terms found in the
+// LICENSE file within this project.
+
+/// <reference lib="dom" />
+//tslint:disable:member-access
+declare class LRUCache<K, V> implements Iterable<[K, V]> {
+  constructor(options: LRUCache.Options<K, V>)
+
+ /**
+ * Number of items in the cache.
+ * Alias for `cache.size`
+ *
+ * @deprecated since 7.0 use `cache.size` instead
+ */
+ public readonly length: number
+
+ public readonly max: number
+ public readonly maxSize: number
+  public readonly sizeCalculation:
+    | LRUCache.SizeCalculator<K, V>
+    | undefined
+  public readonly dispose: LRUCache.Disposer<K, V>
+ /**
+ * @since 7.4.0
+ */
+  public readonly disposeAfter: LRUCache.Disposer<K, V> | null
+ public readonly noDisposeOnSet: boolean
+ public readonly ttl: number
+ public readonly ttlResolution: number
+ public readonly ttlAutopurge: boolean
+ public readonly allowStale: boolean
+ public readonly updateAgeOnGet: boolean
+ /**
+ * @since 7.11.0
+ */
+ public readonly noDeleteOnStaleGet: boolean
+ /**
+ * @since 7.6.0
+ */
+  public readonly fetchMethod: LRUCache.Fetcher<K, V> | null
+
+ /**
+ * The total number of items held in the cache at the current moment.
+ */
+ public readonly size: number
+
+ /**
+ * The total size of items in cache when using size tracking.
+ */
+ public readonly calculatedSize: number
+
+ /**
+ * Add a value to the cache.
+ */
+  public set(
+    key: K,
+    value: V,
+    options?: LRUCache.SetOptions<K, V>
+  ): this
+
+ /**
+ * Return a value from the cache.
+ * Will update the recency of the cache entry found.
+ * If the key is not found, `get()` will return `undefined`.
+ * This can be confusing when setting values specifically to `undefined`,
+ * as in `cache.set(key, undefined)`. Use `cache.has()` to determine
+ * whether a key is present in the cache at all.
+ */
+ // tslint:disable-next-line:no-unnecessary-generics
+  public get<T = V>(
+    key: K,
+    options?: LRUCache.GetOptions
+  ): T | undefined
+
+ /**
+ * Like `get()` but doesn't update recency or delete stale items.
+ * Returns `undefined` if the item is stale, unless `allowStale` is set
+ * either on the cache or in the options object.
+ */
+ // tslint:disable-next-line:no-unnecessary-generics
+  public peek<T = V>(
+    key: K,
+    options?: LRUCache.PeekOptions
+  ): T | undefined
+
+ /**
+ * Check if a key is in the cache, without updating the recency of use.
+ * Will return false if the item is stale, even though it is technically
+ * in the cache.
+ * Will not update item age unless `updateAgeOnHas` is set in the options
+ * or constructor.
+ */
+ public has(key: K, options?: LRUCache.HasOptions): boolean
+
+ /**
+ * Deletes a key out of the cache.
+ * Returns true if the key was deleted, false otherwise.
+ */
+ public delete(key: K): boolean
+
+ /**
+ * Clear the cache entirely, throwing away all values.
+ */
+ public clear(): void
+
+ /**
+ * Delete any stale entries. Returns true if anything was removed, false
+ * otherwise.
+ */
+ public purgeStale(): boolean
+
+ /**
+ * Find a value for which the supplied fn method returns a truthy value,
+ * similar to Array.find(). fn is called as fn(value, key, cache).
+ */
+ // tslint:disable-next-line:no-unnecessary-generics
+  public find<T = V>(
+ callbackFn: (
+ value: V,
+ key: K,
+ cache: this
+ ) => boolean | undefined | void,
+ options?: LRUCache.GetOptions
+ ): T
+
+ /**
+ * Call the supplied function on each item in the cache, in order from
+ * most recently used to least recently used. fn is called as
+ * fn(value, key, cache). Does not update age or recenty of use.
+ */
+  public forEach<T = this>(
+ callbackFn: (this: T, value: V, key: K, cache: this) => void,
+ thisArg?: T
+ ): void
+
+ /**
+ * The same as `cache.forEach(...)` but items are iterated over in reverse
+ * order. (ie, less recently used items are iterated over first.)
+ */
+  public rforEach<T = this>(
+ callbackFn: (this: T, value: V, key: K, cache: this) => void,
+ thisArg?: T
+ ): void
+
+ /**
+ * Return a generator yielding the keys in the cache,
+ * in order from most recently used to least recently used.
+ */
+  public keys(): Generator<K>
+
+ /**
+ * Inverse order version of `cache.keys()`
+ * Return a generator yielding the keys in the cache,
+ * in order from least recently used to most recently used.
+ */
+  public rkeys(): Generator<K>
+
+ /**
+ * Return a generator yielding the values in the cache,
+ * in order from most recently used to least recently used.
+ */
+  public values(): Generator<V>
+
+ /**
+ * Inverse order version of `cache.values()`
+ * Return a generator yielding the values in the cache,
+ * in order from least recently used to most recently used.
+ */
+  public rvalues(): Generator<V>
+
+ /**
+ * Return a generator yielding `[key, value]` pairs,
+ * in order from most recently used to least recently used.
+ */
+ public entries(): Generator<[K, V]>
+
+ /**
+ * Inverse order version of `cache.entries()`
+ * Return a generator yielding `[key, value]` pairs,
+ * in order from least recently used to most recently used.
+ */
+ public rentries(): Generator<[K, V]>
+
+ /**
+ * Iterating over the cache itself yields the same results as
+ * `cache.entries()`
+ */
+ public [Symbol.iterator](): Iterator<[K, V]>
+
+ /**
+ * Return an array of [key, entry] objects which can be passed to
+ * cache.load()
+ */
+  public dump(): Array<[K, LRUCache.Entry<V>]>
+
+ /**
+ * Reset the cache and load in the items in entries in the order listed.
+ * Note that the shape of the resulting cache may be different if the
+ * same options are not used in both caches.
+ */
+  public load(
+    cacheEntries: ReadonlyArray<[K, LRUCache.Entry<V>]>
+  ): void
+
+ /**
+ * Evict the least recently used item, returning its value or `undefined`
+ * if cache is empty.
+ */
+ public pop(): V | undefined
+
+ /**
+ * Deletes a key out of the cache.
+ *
+ * @deprecated since 7.0 use delete() instead
+ */
+ public del(key: K): boolean
+
+ /**
+ * Clear the cache entirely, throwing away all values.
+ *
+ * @deprecated since 7.0 use clear() instead
+ */
+ public reset(): void
+
+ /**
+ * Manually iterates over the entire cache proactively pruning old entries.
+ *
+ * @deprecated since 7.0 use purgeStale() instead
+ */
+ public prune(): boolean
+
+ /**
+ * since: 7.6.0
+ */
+ // tslint:disable-next-line:no-unnecessary-generics
+  public fetch<T = V>(
+    key: K,
+    options?: LRUCache.FetchOptions<K, V>
+  ): Promise<T | undefined>
+
+ /**
+ * since: 7.6.0
+ */
+ public getRemainingTTL(key: K): number
+}
+
+declare namespace LRUCache {
+ type LRUMilliseconds = number
+ type DisposeReason = 'evict' | 'set' | 'delete'
+
+  type SizeCalculator<K, V> = (value: V, key: K) => number
+  type Disposer<K, V> = (
+ value: V,
+ key: K,
+ reason: DisposeReason
+ ) => void
+  type Fetcher<K, V> = (
+    key: K,
+    staleValue: V,
+    options: FetcherOptions<K, V>
+  ) => Promise<V | void | undefined> | V | void | undefined
+
+ interface DeprecatedOptions {
+ /**
+ * alias for ttl
+ *
+ * @deprecated since 7.0 use options.ttl instead
+ */
+ maxAge?: number
+
+ /**
+ * alias for sizeCalculation
+ *
+ * @deprecated since 7.0 use options.sizeCalculation instead
+ */
+    length?: SizeCalculator<K, V>
+
+ /**
+ * alias for allowStale
+ *
+ * @deprecated since 7.0 use options.allowStale instead
+ */
+ stale?: boolean
+ }
+
+ interface LimitedByCount {
+ /**
+ * The number of most recently used items to keep.
+ * Note that we may store fewer items than this if maxSize is hit.
+ */
+ max: number
+ }
+
+  interface LimitedBySize<K, V> {
+ /**
+ * If you wish to track item size, you must provide a maxSize
+ * note that we still will only keep up to max *actual items*,
+ * if max is set, so size tracking may cause fewer than max items
+ * to be stored. At the extreme, a single item of maxSize size
+ * will cause everything else in the cache to be dropped when it
+ * is added. Use with caution!
+ * Note also that size tracking can negatively impact performance,
+ * though for most cases, only minimally.
+ */
+ maxSize: number
+
+ /**
+ * Function to calculate size of items. Useful if storing strings or
+ * buffers or other items where memory size depends on the object itself.
+ * Also note that oversized items do NOT immediately get dropped from
+ * the cache, though they will cause faster turnover in the storage.
+ */
+    sizeCalculation?: SizeCalculator<K, V>
+ }
+
+ interface LimitedByTTL {
+ /**
+ * Max time in milliseconds for items to live in cache before they are
+ * considered stale. Note that stale items are NOT preemptively removed
+ * by default, and MAY live in the cache, contributing to its LRU max,
+ * long after they have expired.
+ *
+ * Also, as this cache is optimized for LRU/MRU operations, some of
+ * the staleness/TTL checks will reduce performance, as they will incur
+ * overhead by deleting items.
+ *
+ * Must be an integer number of ms, defaults to 0, which means "no TTL"
+ */
+ ttl: number
+
+ /**
+ * Boolean flag to tell the cache to not update the TTL when
+ * setting a new value for an existing key (ie, when updating a value
+ * rather than inserting a new value). Note that the TTL value is
+ * _always_ set (if provided) when adding a new entry into the cache.
+ *
+ * @default false
+ * @since 7.4.0
+ */
+ noUpdateTTL?: boolean
+
+ /**
+ * Minimum amount of time in ms in which to check for staleness.
+ * Defaults to 1, which means that the current time is checked
+ * at most once per millisecond.
+ *
+ * Set to 0 to check the current time every time staleness is tested.
+ * (This reduces performance, and is theoretically unnecessary.)
+ *
+ * Setting this to a higher value will improve performance somewhat
+ * while using ttl tracking, albeit at the expense of keeping stale
+ * items around a bit longer than intended.
+ *
+ * @default 1
+ * @since 7.1.0
+ */
+ ttlResolution?: number
+
+ /**
+ * Preemptively remove stale items from the cache.
+ * Note that this may significantly degrade performance,
+ * especially if the cache is storing a large number of items.
+ * It is almost always best to just leave the stale items in
+ * the cache, and let them fall out as new items are added.
+ *
+ * Note that this means that allowStale is a bit pointless,
+ * as stale items will be deleted almost as soon as they expire.
+ *
+ * Use with caution!
+ *
+ * @default false
+ * @since 7.1.0
+ */
+ ttlAutopurge?: boolean
+
+ /**
+ * Return stale items from cache.get() before disposing of them.
+ * Return stale values from cache.fetch() while performing a call
+ * to the `fetchMethod` in the background.
+ *
+ * @default false
+ */
+ allowStale?: boolean
+
+ /**
+ * Update the age of items on cache.get(), renewing their TTL
+ *
+ * @default false
+ */
+ updateAgeOnGet?: boolean
+
+ /**
+ * Do not delete stale items when they are retrieved with cache.get()
+ * Note that the get() return value will still be `undefined` unless
+ * allowStale is true.
+ *
+ * @default false
+ * @since 7.11.0
+ */
+ noDeleteOnStaleGet?: boolean
+
+ /**
+ * Update the age of items on cache.has(), renewing their TTL
+ *
+ * @default false
+ */
+ updateAgeOnHas?: boolean
+ }
+
+  type SafetyBounds<K, V> =
+    | LimitedByCount
+    | LimitedBySize<K, V>
+    | LimitedByTTL
+
+ // options shared by all three of the limiting scenarios
+  interface SharedOptions<K, V> {
+ /**
+ * Function that is called on items when they are dropped from the cache.
+ * This can be handy if you want to close file descriptors or do other
+ * cleanup tasks when items are no longer accessible. Called with `key,
+ * value`. It's called before actually removing the item from the
+ * internal cache, so it is *NOT* safe to re-add them.
+ * Use `disposeAfter` if you wish to dispose items after they have been
+ * full removed, when it is safe to add them back to the cache.
+ */
+    dispose?: Disposer<K, V>
+
+ /**
+ * The same as dispose, but called *after* the entry is completely
+ * removed and the cache is once again in a clean state. It is safe to
+ * add an item right back into the cache at this point.
+ * However, note that it is *very* easy to inadvertently create infinite
+ * recursion this way.
+ *
+ * @since 7.3.0
+ */
+    disposeAfter?: Disposer<K, V>
+
+ /**
+ * Set to true to suppress calling the dispose() function if the entry
+ * key is still accessible within the cache.
+ * This may be overridden by passing an options object to cache.set().
+ *
+ * @default false
+ */
+ noDisposeOnSet?: boolean
+
+ /**
+ * `fetchMethod` Function that is used to make background asynchronous
+ * fetches. Called with `fetchMethod(key, staleValue)`. May return a
+ * Promise.
+ *
+ * If `fetchMethod` is not provided, then `cache.fetch(key)` is
+ * equivalent to `Promise.resolve(cache.get(key))`.
+ *
+ * @since 7.6.0
+ */
+    fetchMethod?: LRUCache.Fetcher<K, V>
+
+ /**
+ * Set to true to suppress the deletion of stale data when a
+ * `fetchMethod` throws an error or returns a rejected promise
+ *
+ * @default false
+ * @since 7.10.0
+ */
+ noDeleteOnFetchRejection?: boolean
+
+ /**
+ * Set to any value in the constructor or fetch() options to
+ * pass arbitrary data to the fetch() method in the options.context
+ * field.
+ *
+ * @since 7.12.0
+ */
+ fetchContext?: any
+ }
+
+  type Options<K, V> = SharedOptions<K, V> &
+    DeprecatedOptions &
+    SafetyBounds<K, V>
+
+ /**
+ * options which override the options set in the LRUCache constructor
+ * when making `cache.set()` calls.
+ */
+  interface SetOptions<K, V> {
+ /**
+ * A value for the size of the entry, prevents calls to
+ * `sizeCalculation` function.
+ */
+ size?: number
+    sizeCalculation?: SizeCalculator<K, V>
+ ttl?: number
+ start?: number
+ noDisposeOnSet?: boolean
+ noUpdateTTL?: boolean
+ }
+
+ /**
+ * options which override the options set in the LRUCAche constructor
+ * when making `cache.has()` calls.
+ */
+ interface HasOptions {
+ updateAgeOnHas?: boolean
+ }
+
+ /**
+ * options which override the options set in the LRUCache constructor
+ * when making `cache.get()` calls.
+ */
+ interface GetOptions {
+ allowStale?: boolean
+ updateAgeOnGet?: boolean
+ noDeleteOnStaleGet?: boolean
+ }
+
+ /**
+ * options which override the options set in the LRUCache constructor
+ * when making `cache.peek()` calls.
+ */
+ interface PeekOptions {
+ allowStale?: boolean
+ }
+
+  interface FetcherFetchOptions<K, V> {
+ allowStale?: boolean
+ updateAgeOnGet?: boolean
+ noDeleteOnStaleGet?: boolean
+ size?: number
+    sizeCalculation?: SizeCalculator<K, V>
+ ttl?: number
+ noDisposeOnSet?: boolean
+ noUpdateTTL?: boolean
+ noDeleteOnFetchRejection?: boolean
+ }
+
+ /**
+ * options which override the options set in the LRUCache constructor
+ * when making `cache.fetch()` calls.
+ * This is the union of GetOptions and SetOptions, plus the
+ * `noDeleteOnFetchRejection` and `fetchContext` fields.
+ */
+  interface FetchOptions<K, V> extends FetcherFetchOptions<K, V> {
+ fetchContext?: any
+ }
+
+  interface FetcherOptions<K, V> {
+ signal: AbortSignal
+    options: FetcherFetchOptions<K, V>
+ context: any
+ }
+
+  interface Entry<V> {
+ value: V
+ ttl?: number
+ size?: number
+ start?: number
+ }
+}
+
+export = LRUCache
diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js
index fb1a076fa3ae8..479ffc8656b70 100644
--- a/node_modules/lru-cache/index.js
+++ b/node_modules/lru-cache/index.js
@@ -1,5 +1,9 @@
-const perf = typeof performance === 'object' && performance &&
- typeof performance.now === 'function' ? performance : Date
+const perf =
+ typeof performance === 'object' &&
+ performance &&
+ typeof performance.now === 'function'
+ ? performance
+ : Date
const hasAbortController = typeof AbortController === 'function'
@@ -7,20 +11,30 @@ const hasAbortController = typeof AbortController === 'function'
// this doesn't have nearly all the checks and whatnot that
// actual AbortController/Signal has, but it's enough for
// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController ? AbortController : Object.assign(
- class AbortController {
- constructor () { this.signal = new AC.AbortSignal }
- abort () {
- this.signal.dispatchEvent('abort')
- }
- },
- {
- AbortSignal: class AbortSignal {
- constructor () {
+const AC = hasAbortController
+ ? AbortController
+ : class AbortController {
+ constructor() {
+ this.signal = new AS()
+ }
+ abort() {
+ this.signal.dispatchEvent('abort')
+ }
+ }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+ ? AbortSignal
+ : hasACAbortSignal
+  ? AC.AbortSignal
+ : class AbortSignal {
+ constructor() {
this.aborted = false
this._listeners = []
}
- dispatchEvent (type) {
+ dispatchEvent(type) {
if (type === 'abort') {
this.aborted = true
const e = { type, target: this }
@@ -28,20 +42,18 @@ const AC = hasAbortController ? AbortController : Object.assign(
this._listeners.forEach(f => f(e), this)
}
}
- onabort () {}
- addEventListener (ev, fn) {
+ onabort() {}
+ addEventListener(ev, fn) {
if (ev === 'abort') {
this._listeners.push(fn)
}
}
- removeEventListener (ev, fn) {
+ removeEventListener(ev, fn) {
if (ev === 'abort') {
this._listeners = this._listeners.filter(f => f !== fn)
}
}
}
- }
-)
const warned = new Set()
const deprecatedOption = (opt, instead) => {
@@ -69,10 +81,10 @@ const deprecatedProperty = (field, instead) => {
const emitWarning = (...a) => {
typeof process === 'object' &&
- process &&
- typeof process.emitWarning === 'function'
- ? process.emitWarning(...a)
- : console.error(...a)
+ process &&
+ typeof process.emitWarning === 'function'
+ ? process.emitWarning(...a)
+ : console.error(...a)
}
const shouldWarn = code => !warned.has(code)
@@ -93,22 +105,28 @@ const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
* zeroes at init time is brutal when you get that big.
* But why not be complete?
* Maybe in the future, these limits will have expanded. */
-const getUintArray = max => !isPosInt(max) ? null
-: max <= Math.pow(2, 8) ? Uint8Array
-: max <= Math.pow(2, 16) ? Uint16Array
-: max <= Math.pow(2, 32) ? Uint32Array
-: max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-: null
+const getUintArray = max =>
+ !isPosInt(max)
+ ? null
+ : max <= Math.pow(2, 8)
+ ? Uint8Array
+ : max <= Math.pow(2, 16)
+ ? Uint16Array
+ : max <= Math.pow(2, 32)
+ ? Uint32Array
+ : max <= Number.MAX_SAFE_INTEGER
+ ? ZeroArray
+ : null
class ZeroArray extends Array {
- constructor (size) {
+ constructor(size) {
super(size)
this.fill(0)
}
}
class Stack {
- constructor (max) {
+ constructor(max) {
if (max === 0) {
return []
}
@@ -116,16 +134,16 @@ class Stack {
this.heap = new UintArray(max)
this.length = 0
}
- push (n) {
+ push(n) {
this.heap[this.length++] = n
}
- pop () {
+ pop() {
return this.heap[--this.length]
}
}
class LRUCache {
- constructor (options = {}) {
+ constructor(options = {}) {
const {
max = 0,
ttl,
@@ -141,15 +159,15 @@ class LRUCache {
maxSize = 0,
sizeCalculation,
fetchMethod,
+ fetchContext,
+ noDeleteOnFetchRejection,
+ noDeleteOnStaleGet,
} = options
// deprecated options, don't trigger a warning for getting them if
// the thing being passed in is another LRUCache we're copying.
- const {
- length,
- maxAge,
- stale,
- } = options instanceof LRUCache ? {} : options
+ const { length, maxAge, stale } =
+ options instanceof LRUCache ? {} : options
if (max !== 0 && !isPosInt(max)) {
throw new TypeError('max option must be a nonnegative integer')
@@ -165,7 +183,9 @@ class LRUCache {
this.sizeCalculation = sizeCalculation || length
if (this.sizeCalculation) {
if (!this.maxSize) {
- throw new TypeError('cannot set sizeCalculation without setting maxSize')
+ throw new TypeError(
+ 'cannot set sizeCalculation without setting maxSize'
+ )
}
if (typeof this.sizeCalculation !== 'function') {
throw new TypeError('sizeCalculation set to non-function')
@@ -174,7 +194,16 @@ class LRUCache {
this.fetchMethod = fetchMethod || null
if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
- throw new TypeError('fetchMethod must be a function if specified')
+ throw new TypeError(
+ 'fetchMethod must be a function if specified'
+ )
+ }
+
+ this.fetchContext = fetchContext
+ if (!this.fetchMethod && fetchContext !== undefined) {
+ throw new TypeError(
+ 'cannot set fetchContext without fetchMethod'
+ )
}
this.keyMap = new Map()
@@ -200,37 +229,48 @@ class LRUCache {
}
this.noDisposeOnSet = !!noDisposeOnSet
this.noUpdateTTL = !!noUpdateTTL
+ this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
if (this.maxSize !== 0) {
if (!isPosInt(this.maxSize)) {
- throw new TypeError('maxSize must be a positive integer if specified')
+ throw new TypeError(
+ 'maxSize must be a positive integer if specified'
+ )
}
this.initializeSizeTracking()
}
this.allowStale = !!allowStale || !!stale
+ this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
this.updateAgeOnGet = !!updateAgeOnGet
this.updateAgeOnHas = !!updateAgeOnHas
- this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0
- ? ttlResolution : 1
+ this.ttlResolution =
+ isPosInt(ttlResolution) || ttlResolution === 0
+ ? ttlResolution
+ : 1
this.ttlAutopurge = !!ttlAutopurge
this.ttl = ttl || maxAge || 0
if (this.ttl) {
if (!isPosInt(this.ttl)) {
- throw new TypeError('ttl must be a positive integer if specified')
+ throw new TypeError(
+ 'ttl must be a positive integer if specified'
+ )
}
this.initializeTTLTracking()
}
// do not allow completely unbounded caches
if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
- throw new TypeError('At least one of max, maxSize, or ttl is required')
+ throw new TypeError(
+ 'At least one of max, maxSize, or ttl is required'
+ )
}
if (!this.ttlAutopurge && !this.max && !this.maxSize) {
const code = 'LRU_CACHE_UNBOUNDED'
if (shouldWarn(code)) {
warned.add(code)
- const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+ const msg =
+ 'TTL caching without ttlAutopurge, max, or maxSize can ' +
'result in unbounded memory consumption.'
emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
}
@@ -247,16 +287,16 @@ class LRUCache {
}
}
- getRemainingTTL (key) {
+ getRemainingTTL(key) {
return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
}
- initializeTTLTracking () {
+ initializeTTLTracking() {
this.ttls = new ZeroArray(this.max)
this.starts = new ZeroArray(this.max)
- this.setItemTTL = (index, ttl) => {
- this.starts[index] = ttl !== 0 ? perf.now() : 0
+ this.setItemTTL = (index, ttl, start = perf.now()) => {
+ this.starts[index] = ttl !== 0 ? start : 0
this.ttls[index] = ttl
if (ttl !== 0 && this.ttlAutopurge) {
const t = setTimeout(() => {
@@ -271,7 +311,7 @@ class LRUCache {
}
}
- this.updateItemAge = (index) => {
+ this.updateItemAge = index => {
this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
}
@@ -282,7 +322,10 @@ class LRUCache {
const n = perf.now()
if (this.ttlResolution > 0) {
cachedNow = n
- const t = setTimeout(() => cachedNow = 0, this.ttlResolution)
+ const t = setTimeout(
+ () => (cachedNow = 0),
+ this.ttlResolution
+ )
/* istanbul ignore else - not available on all platforms */
if (t.unref) {
t.unref()
@@ -291,28 +334,38 @@ class LRUCache {
return n
}
- this.getRemainingTTL = (key) => {
+ this.getRemainingTTL = key => {
const index = this.keyMap.get(key)
if (index === undefined) {
return 0
}
- return this.ttls[index] === 0 || this.starts[index] === 0 ? Infinity
- : ((this.starts[index] + this.ttls[index]) - (cachedNow || getNow()))
+ return this.ttls[index] === 0 || this.starts[index] === 0
+ ? Infinity
+ : this.starts[index] +
+ this.ttls[index] -
+ (cachedNow || getNow())
}
- this.isStale = (index) => {
- return this.ttls[index] !== 0 && this.starts[index] !== 0 &&
- ((cachedNow || getNow()) - this.starts[index] > this.ttls[index])
+ this.isStale = index => {
+ return (
+ this.ttls[index] !== 0 &&
+ this.starts[index] !== 0 &&
+ (cachedNow || getNow()) - this.starts[index] >
+ this.ttls[index]
+ )
}
}
- updateItemAge (index) {}
- setItemTTL (index, ttl) {}
- isStale (index) { return false }
+ updateItemAge(index) {}
+ setItemTTL(index, ttl, start) {}
+ isStale(index) {
+ return false
+ }
- initializeSizeTracking () {
+ initializeSizeTracking() {
this.calculatedSize = 0
this.sizes = new ZeroArray(this.max)
- this.removeItemSize = index => this.calculatedSize -= this.sizes[index]
+ this.removeItemSize = index =>
+ (this.calculatedSize -= this.sizes[index])
this.requireSize = (k, v, size, sizeCalculation) => {
if (!isPosInt(size)) {
if (sizeCalculation) {
@@ -321,10 +374,14 @@ class LRUCache {
}
size = sizeCalculation(v, k)
if (!isPosInt(size)) {
- throw new TypeError('sizeCalculation return invalid (expect positive integer)')
+ throw new TypeError(
+ 'sizeCalculation return invalid (expect positive integer)'
+ )
}
} else {
- throw new TypeError('invalid size value (must be positive integer)')
+ throw new TypeError(
+ 'invalid size value (must be positive integer)'
+ )
}
}
return size
@@ -338,15 +395,17 @@ class LRUCache {
this.calculatedSize += this.sizes[index]
}
}
- removeItemSize (index) {}
- addItemSize (index, v, k, size) {}
- requireSize (k, v, size, sizeCalculation) {
+ removeItemSize(index) {}
+ addItemSize(index, v, k, size) {}
+ requireSize(k, v, size, sizeCalculation) {
if (size || sizeCalculation) {
- throw new TypeError('cannot set size without setting maxSize on cache')
+ throw new TypeError(
+ 'cannot set size without setting maxSize on cache'
+ )
}
}
- *indexes ({ allowStale = this.allowStale } = {}) {
+ *indexes({ allowStale = this.allowStale } = {}) {
if (this.size) {
for (let i = this.tail; true; ) {
if (!this.isValidIndex(i)) {
@@ -364,7 +423,7 @@ class LRUCache {
}
}
- *rindexes ({ allowStale = this.allowStale } = {}) {
+ *rindexes({ allowStale = this.allowStale } = {}) {
if (this.size) {
for (let i = this.head; true; ) {
if (!this.isValidIndex(i)) {
@@ -382,48 +441,48 @@ class LRUCache {
}
}
- isValidIndex (index) {
+ isValidIndex(index) {
return this.keyMap.get(this.keyList[index]) === index
}
- *entries () {
+ *entries() {
for (const i of this.indexes()) {
yield [this.keyList[i], this.valList[i]]
}
}
- *rentries () {
+ *rentries() {
for (const i of this.rindexes()) {
yield [this.keyList[i], this.valList[i]]
}
}
- *keys () {
+ *keys() {
for (const i of this.indexes()) {
yield this.keyList[i]
}
}
- *rkeys () {
+ *rkeys() {
for (const i of this.rindexes()) {
yield this.keyList[i]
}
}
- *values () {
+ *values() {
for (const i of this.indexes()) {
yield this.valList[i]
}
}
- *rvalues () {
+ *rvalues() {
for (const i of this.rindexes()) {
yield this.valList[i]
}
}
- [Symbol.iterator] () {
+ [Symbol.iterator]() {
return this.entries()
}
- find (fn, getOptions = {}) {
+ find(fn, getOptions = {}) {
for (const i of this.indexes()) {
if (fn(this.valList[i], this.keyList[i], this)) {
return this.get(this.keyList[i], getOptions)
@@ -431,24 +490,24 @@ class LRUCache {
}
}
- forEach (fn, thisp = this) {
+ forEach(fn, thisp = this) {
for (const i of this.indexes()) {
fn.call(thisp, this.valList[i], this.keyList[i], this)
}
}
- rforEach (fn, thisp = this) {
+ rforEach(fn, thisp = this) {
for (const i of this.rindexes()) {
fn.call(thisp, this.valList[i], this.keyList[i], this)
}
}
- get prune () {
+ get prune() {
deprecatedMethod('prune', 'purgeStale')
return this.purgeStale
}
- purgeStale () {
+ purgeStale() {
let deleted = false
for (const i of this.rindexes({ allowStale: true })) {
if (this.isStale(i)) {
@@ -459,14 +518,19 @@ class LRUCache {
return deleted
}
- dump () {
+ dump() {
const arr = []
- for (const i of this.indexes()) {
+ for (const i of this.indexes({ allowStale: true })) {
const key = this.keyList[i]
- const value = this.valList[i]
+ const v = this.valList[i]
+ const value = this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
const entry = { value }
if (this.ttls) {
entry.ttl = this.ttls[i]
+ // always dump the start relative to a portable timestamp
+ // it's ok for this to be a bit slow, it's a rare operation.
+ const age = perf.now() - this.starts[i]
+ entry.start = Math.floor(Date.now() - age)
}
if (this.sizes) {
entry.size = this.sizes[i]
@@ -476,22 +540,34 @@ class LRUCache {
return arr
}
- load (arr) {
+ load(arr) {
this.clear()
for (const [key, entry] of arr) {
+ if (entry.start) {
+ // entry.start is a portable timestamp, but we may be using
+ // node's performance.now(), so calculate the offset.
+ // it's ok for this to be a bit slow, it's a rare operation.
+ const age = Date.now() - entry.start
+ entry.start = perf.now() - age
+ }
this.set(key, entry.value, entry)
}
}
- dispose (v, k, reason) {}
+ dispose(v, k, reason) {}
- set (k, v, {
- ttl = this.ttl,
- noDisposeOnSet = this.noDisposeOnSet,
- size = 0,
- sizeCalculation = this.sizeCalculation,
- noUpdateTTL = this.noUpdateTTL,
- } = {}) {
+ set(
+ k,
+ v,
+ {
+ ttl = this.ttl,
+ start,
+ noDisposeOnSet = this.noDisposeOnSet,
+ size = 0,
+ sizeCalculation = this.sizeCalculation,
+ noUpdateTTL = this.noUpdateTTL,
+ } = {}
+ ) {
size = this.requireSize(k, v, size, sizeCalculation)
let index = this.size === 0 ? undefined : this.keyMap.get(k)
if (index === undefined) {
@@ -503,7 +579,7 @@ class LRUCache {
this.next[this.tail] = index
this.prev[index] = this.tail
this.tail = index
- this.size ++
+ this.size++
this.addItemSize(index, v, k, size)
noUpdateTTL = false
} else {
@@ -530,7 +606,7 @@ class LRUCache {
this.initializeTTLTracking()
}
if (!noUpdateTTL) {
- this.setItemTTL(index, ttl)
+ this.setItemTTL(index, ttl, start)
}
if (this.disposeAfter) {
while (this.disposed.length) {
@@ -540,7 +616,7 @@ class LRUCache {
return this
}
- newIndex () {
+ newIndex() {
if (this.size === 0) {
return this.tail
}
@@ -554,7 +630,7 @@ class LRUCache {
return this.initialFill++
}
- pop () {
+ pop() {
if (this.size) {
const val = this.valList[this.head]
this.evict(true)
@@ -562,7 +638,7 @@ class LRUCache {
}
}
- evict (free) {
+ evict(free) {
const head = this.head
const k = this.keyList[head]
const v = this.valList[head]
@@ -583,11 +659,11 @@ class LRUCache {
}
this.head = this.next[head]
this.keyMap.delete(k)
- this.size --
+ this.size--
return head
}
- has (k, { updateAgeOnHas = this.updateAgeOnHas } = {}) {
+ has(k, { updateAgeOnHas = this.updateAgeOnHas } = {}) {
const index = this.keyMap.get(k)
if (index !== undefined) {
if (!this.isStale(index)) {
@@ -601,14 +677,14 @@ class LRUCache {
}
// like get(), but without any LRU updating or TTL expiration
- peek (k, { allowStale = this.allowStale } = {}) {
+ peek(k, { allowStale = this.allowStale } = {}) {
const index = this.keyMap.get(k)
if (index !== undefined && (allowStale || !this.isStale(index))) {
return this.valList[index]
}
}
- backgroundFetch (k, index, options) {
+ backgroundFetch(k, index, options, context) {
const v = index === undefined ? undefined : this.valList[index]
if (this.isBackgroundFetch(v)) {
return v
@@ -617,15 +693,36 @@ class LRUCache {
const fetchOpts = {
signal: ac.signal,
options,
+ context,
}
- const p = Promise.resolve(this.fetchMethod(k, v, fetchOpts)).then(v => {
+ const cb = v => {
if (!ac.signal.aborted) {
this.set(k, v, fetchOpts.options)
}
return v
- })
+ }
+ const eb = er => {
+ if (this.valList[index] === p) {
+ const del =
+ !options.noDeleteOnFetchRejection ||
+ p.__staleWhileFetching === undefined
+ if (del) {
+ this.delete(k)
+ } else {
+ // still replace the *promise* with the stale value,
+ // since we are done with the promise at this point.
+ this.valList[index] = p.__staleWhileFetching
+ }
+ }
+ if (p.__returned === p) {
+ throw er
+ }
+ }
+ const pcall = res => res(this.fetchMethod(k, v, fetchOpts))
+ const p = new Promise(pcall).then(cb, eb)
p.__abortController = ac
p.__staleWhileFetching = v
+ p.__returned = null
if (index === undefined) {
this.set(k, p, fetchOpts.options)
index = this.keyMap.get(k)
@@ -635,44 +732,66 @@ class LRUCache {
return p
}
- isBackgroundFetch (p) {
- return p && typeof p === 'object' && typeof p.then === 'function' &&
- Object.prototype.hasOwnProperty.call(p, '__staleWhileFetching')
+ isBackgroundFetch(p) {
+ return (
+ p &&
+ typeof p === 'object' &&
+ typeof p.then === 'function' &&
+ Object.prototype.hasOwnProperty.call(
+ p,
+ '__staleWhileFetching'
+ ) &&
+ Object.prototype.hasOwnProperty.call(p, '__returned') &&
+ (p.__returned === p || p.__returned === null)
+ )
}
// this takes the union of get() and set() opts, because it does both
- async fetch (k, {
- allowStale = this.allowStale,
- updateAgeOnGet = this.updateAgeOnGet,
- ttl = this.ttl,
- noDisposeOnSet = this.noDisposeOnSet,
- size = 0,
- sizeCalculation = this.sizeCalculation,
- noUpdateTTL = this.noUpdateTTL,
- } = {}) {
+ async fetch(
+ k,
+ {
+ // get options
+ allowStale = this.allowStale,
+ updateAgeOnGet = this.updateAgeOnGet,
+ noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+ // set options
+ ttl = this.ttl,
+ noDisposeOnSet = this.noDisposeOnSet,
+ size = 0,
+ sizeCalculation = this.sizeCalculation,
+ noUpdateTTL = this.noUpdateTTL,
+ // fetch exclusive options
+ noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+ fetchContext = this.fetchContext,
+ } = {}
+ ) {
if (!this.fetchMethod) {
- return this.get(k, {allowStale, updateAgeOnGet})
+ return this.get(k, { allowStale, updateAgeOnGet, noDeleteOnStaleGet })
}
const options = {
allowStale,
updateAgeOnGet,
+ noDeleteOnStaleGet,
ttl,
noDisposeOnSet,
size,
sizeCalculation,
noUpdateTTL,
+ noDeleteOnFetchRejection,
}
let index = this.keyMap.get(k)
if (index === undefined) {
- return this.backgroundFetch(k, index, options)
+ const p = this.backgroundFetch(k, index, options, fetchContext)
+ return (p.__returned = p)
} else {
// in cache, maybe already fetching
const v = this.valList[index]
if (this.isBackgroundFetch(v)) {
return allowStale && v.__staleWhileFetching !== undefined
- ? v.__staleWhileFetching : v
+ ? v.__staleWhileFetching
+ : (v.__returned = v)
}
if (!this.isStale(index)) {
@@ -685,16 +804,21 @@ class LRUCache {
// ok, it is stale, and not already fetching
// refresh the cache.
- const p = this.backgroundFetch(k, index, options)
+ const p = this.backgroundFetch(k, index, options, fetchContext)
return allowStale && p.__staleWhileFetching !== undefined
- ? p.__staleWhileFetching : p
+ ? p.__staleWhileFetching
+ : (p.__returned = p)
}
}
- get (k, {
- allowStale = this.allowStale,
- updateAgeOnGet = this.updateAgeOnGet,
- } = {}) {
+ get(
+ k,
+ {
+ allowStale = this.allowStale,
+ updateAgeOnGet = this.updateAgeOnGet,
+ noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+ } = {}
+ ) {
const index = this.keyMap.get(k)
if (index !== undefined) {
const value = this.valList[index]
@@ -702,7 +826,9 @@ class LRUCache {
if (this.isStale(index)) {
// delete only if not an in-flight background fetch
if (!fetching) {
- this.delete(k)
+ if (!noDeleteOnStaleGet) {
+ this.delete(k)
+ }
return allowStale ? value : undefined
} else {
return allowStale ? value.__staleWhileFetching : undefined
@@ -723,12 +849,12 @@ class LRUCache {
}
}
- connect (p, n) {
+ connect(p, n) {
this.prev[n] = p
this.next[p] = n
}
- moveToTail (index) {
+ moveToTail(index) {
// if tail already, nothing to do
// if head, move head to next[index]
// else
@@ -748,12 +874,12 @@ class LRUCache {
}
}
- get del () {
+ get del() {
deprecatedMethod('del', 'delete')
return this.delete
}
- delete (k) {
+ delete(k) {
let deleted = false
if (this.size !== 0) {
const index = this.keyMap.get(k)
@@ -783,7 +909,7 @@ class LRUCache {
this.next[this.prev[index]] = this.next[index]
this.prev[this.next[index]] = this.prev[index]
}
- this.size --
+ this.size--
this.free.push(index)
}
}
@@ -796,7 +922,7 @@ class LRUCache {
return deleted
}
- clear () {
+ clear() {
for (const index of this.rindexes({ allowStale: true })) {
const v = this.valList[index]
if (this.isBackgroundFetch(v)) {
@@ -833,19 +959,22 @@ class LRUCache {
}
}
- get reset () {
+ get reset() {
deprecatedMethod('reset', 'clear')
return this.clear
}
- get length () {
+ get length() {
deprecatedProperty('length', 'size')
return this.size
}
- static get AbortController () {
+ static get AbortController() {
return AC
}
+ static get AbortSignal() {
+ return AS
+ }
}
module.exports = LRUCache
diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json
index 5364b09d2002c..c023ce6c49aca 100644
--- a/node_modules/lru-cache/package.json
+++ b/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
- "version": "7.9.0",
+ "version": "7.12.0",
"author": "Isaac Z. Schlueter ",
"keywords": [
"mru",
@@ -10,34 +10,60 @@
],
"scripts": {
"build": "",
+ "size": "size-limit",
"test": "tap",
"snap": "tap",
- "size": "size-limit",
"preversion": "npm test",
"postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags"
+ "prepublishOnly": "git push origin --follow-tags",
+ "format": "prettier --write ."
},
"main": "index.js",
"repository": "git://github.com/isaacs/node-lru-cache.git",
"devDependencies": {
"@size-limit/preset-small-lib": "^7.0.8",
+ "@types/node": "^17.0.31",
+ "@types/tap": "^15.0.6",
"benchmark": "^2.1.4",
+ "c8": "^7.11.2",
"clock-mock": "^1.0.4",
+ "eslint-config-prettier": "^8.5.0",
+ "prettier": "^2.6.2",
"size-limit": "^7.0.8",
- "tap": "^15.1.6"
+ "tap": "^16.0.1",
+ "ts-node": "^10.7.0",
+ "tslib": "^2.4.0",
+ "typescript": "^4.6.4"
},
"license": "ISC",
"files": [
- "index.js"
+ "index.js",
+ "index.d.ts"
],
"engines": {
"node": ">=12"
},
+ "prettier": {
+ "semi": false,
+ "printWidth": 70,
+ "tabWidth": 2,
+ "useTabs": false,
+ "singleQuote": true,
+ "jsxSingleQuote": false,
+ "bracketSameLine": true,
+ "arrowParens": "avoid",
+ "endOfLine": "lf"
+ },
"tap": {
- "coverage-map": "map.js",
+ "nyc-arg": [
+ "--include=index.js"
+ ],
"node-arg": [
- "--expose-gc"
- ]
+ "--expose-gc",
+ "--require",
+ "ts-node/register"
+ ],
+ "ts": false
},
"size-limit": [
{
diff --git a/package-lock.json b/package-lock.json
index 73e0d0dd918b3..b63ab00276837 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -4578,9 +4578,9 @@
}
},
"node_modules/lru-cache": {
- "version": "7.9.0",
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.9.0.tgz",
- "integrity": "sha512-lkcNMUKqdJk96TuIXUidxaPuEg5sJo/+ZyVE2BDFnuZGzwXem7d8582eG8vbu4todLfT14snP6iHriCHXXi5Rw==",
+ "version": "7.12.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.12.0.tgz",
+ "integrity": "sha512-OIP3DwzRZDfLg9B9VP/huWBlpvbkmbfiBy8xmsXp4RPmE4A3MhwNozc5ZJ3fWnSg8fDcdlE/neRTPG2ycEKliw==",
"inBundle": true,
"engines": {
"node": ">=12"