Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: captcha time use seconds #3572

Merged
merged 4 commits into from
Jan 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 3 additions & 7 deletions packages-legacy/cache/src/service/cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import * as cacheManager from 'cache-manager';
@Provide()
@Scope(ScopeEnum.Singleton)
export class CacheManager {
cache: cacheManager.Cache;
cache;

@Config('cache')
cacheConfig;
Expand All @@ -20,7 +20,7 @@ export class CacheManager {
// 获取key
async get<T>(key: string): Promise<T> {
return new Promise((resolve, reject) => {
this.cache.get<T>(key, (err, result) => {
this.cache.get(key, (err, result) => {
if (err) {
reject(err);
return;
Expand All @@ -31,11 +31,7 @@ export class CacheManager {
}

// 设置cache
async set<T>(
key: string,
value: T,
options?: cacheManager.CachingConfig
): Promise<T> {
async set<T>(key: string, value: T, options?: any): Promise<T> {
return await this.cache.set(key, value, options);
}

Expand Down
4 changes: 2 additions & 2 deletions packages/cache-manager/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
"cache-manager-ioredis-yet": "1.2.2"
},
"dependencies": {
"cache-manager": "5.3.2",
"promise-coalesce": "1.1.1"
"lodash.clonedeep": "4.5.0",
"lru-cache": "7.18.3"
}
}
250 changes: 250 additions & 0 deletions packages/cache-manager/src/base/cacheManager.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,250 @@
/**
 * fork from https://github.com/node-cache-manager/node-cache-manager/tree/master
 * reason: supports Node.js versions below v18 and adds some features,
 * for example: methodWrap
*/

import { coalesceAsync } from './prmoiseCoalesce';
import {
Cache,
CachingConfig,
Config,
FactoryConfig,
FactoryStore,
MemoryCache,
MemoryConfig,
MemoryStore,
Milliseconds,
MultiCache,
Store,
Stores,
WrapTTL,
} from './types';
import { memoryStore } from './store';

export async function caching(
  name: 'memory',
  args?: MemoryConfig
): Promise<MemoryCache>;
export async function caching<S extends Store>(store: S): Promise<Cache<S>>;
export async function caching<S extends Store, T extends object = never>(
  factory: FactoryStore<S, T>,
  args?: FactoryConfig<T>
): Promise<Cache<S>>;

/**
 * Generic caching interface that wraps any caching library with a
 * compatible interface.
 *
 * Accepts either the literal `'memory'` (built-in memory store), an async
 * factory that builds a store, or an already-constructed store instance.
 */
export async function caching<S extends Store, T extends object = never>(
  factory: Stores<S, T>,
  args?: CachingConfig<T>
): Promise<Cache<S> | Cache<Store> | MemoryCache> {
  // Built-in memory store, referenced by name.
  if (factory === 'memory') {
    return createCache(memoryStore(args as MemoryConfig), args as MemoryConfig);
  }
  // Factory function: the store is built (possibly asynchronously) first.
  if (typeof factory === 'function') {
    return createCache(await factory(args as FactoryConfig<T>), args);
  }
  // Otherwise the caller handed us a ready-made store instance.
  return createCache(factory, args);
}

export function createCache(
  store: MemoryStore,
  args?: MemoryConfig
): MemoryCache;

export function createCache(store: Store, args?: Config): Cache<Store>;

/**
 * Create cache instance by store (non-async).
 */
export function createCache<S extends Store, C extends Config>(
  store: S,
  args?: C
): Cache<S> {
  // Shared read-through logic for `wrap` and `methodWrap` (the two were
  // previously duplicated). `coalesceAsync` collapses concurrent callers of
  // the same key so `fn` runs at most once at a time per key.
  const readThrough = async <T>(
    key: string,
    fn: () => Promise<T>,
    ttl?: WrapTTL<T>
  ) => {
    return coalesceAsync(key, async () => {
      const value = await store.get<T>(key);
      if (value === undefined) {
        // Cache miss: produce the value once and store it.
        const result = await fn();
        const cacheTTL = typeof ttl === 'function' ? ttl(result) : ttl;
        await store.set<T>(key, result, cacheTTL);
        return result;
      } else if (args?.refreshThreshold) {
        // Cache hit, but the entry is close to expiring: kick off a
        // background refresh while returning the cached value immediately.
        const cacheTTL = typeof ttl === 'function' ? ttl(value) : ttl;
        const remainingTtl = await store.ttl(key);
        if (remainingTtl !== -1 && remainingTtl < args.refreshThreshold) {
          // The `+++` prefix keeps the refresh coalesced separately from reads.
          coalesceAsync(`+++${key}`, fn)
            .then(result => store.set<T>(key, result, cacheTTL))
            .catch(() => {
              // Background refresh is best-effort: a rejection here must not
              // become an unhandled promise rejection (which can crash Node).
            });
        }
      }
      return value;
    });
  };

  return {
    /**
     * Wraps a function in cache. I.e., the first time the function is run,
     * its results are stored in cache so subsequent calls retrieve from cache
     * instead of calling the function.
     *
     * @example
     * const result = await cache.wrap('key', () => Promise.resolve(1));
     */
    wrap: readThrough,
    store: store as S,
    del: (key: string) => store.del(key),
    get: <T>(key: string) => store.get<T>(key),
    set: (key: string, value: unknown, ttl?: Milliseconds) =>
      store.set(key, value, ttl),
    reset: () => store.reset(),
    /**
     * Same as `wrap`, but for a function that takes arguments; `fnArgs` is
     * applied to `fn` only when the value has to be (re)computed.
     */
    methodWrap: <T>(
      key: string,
      fn: (...args) => Promise<T>,
      fnArgs: any[],
      ttl?: WrapTTL<T>
    ): Promise<T> => readThrough(key, () => fn(...fnArgs), ttl),
  };
}

/**
 * Module that lets you specify a hierarchy of caches.
 *
 * Reads scan the tiers in order; writes fan out to every tier. A tier that
 * throws is treated as a miss so the remaining tiers still get a chance.
 */
export function multiCaching<Caches extends Cache[]>(
  caches: Caches
): MultiCache {
  // First non-undefined hit across the tiers wins; errors are misses.
  const get = async <T>(key: string) => {
    for (const cache of caches) {
      try {
        const val = await cache.get<T>(key);
        if (val !== undefined) return val;
      } catch (e) {
        // ignore tier failure, fall through to the next cache
      }
    }
  };
  const set = async <T>(
    key: string,
    data: T,
    ttl?: Milliseconds | undefined
  ) => {
    await Promise.all(caches.map(cache => cache.set(key, data, ttl)));
  };

  // Scan tiers in priority order; returns the value (undefined on a full
  // miss) plus the index of the tier that answered.
  const lookup = async <T>(key: string): Promise<[T | undefined, number]> => {
    for (let i = 0; i < caches.length; i++) {
      try {
        const value = await caches[i].get<T>(key);
        if (value !== undefined) return [value, i];
      } catch (e) {
        // ignore tier failure
      }
    }
    return [undefined, caches.length];
  };

  // Copy a hit found in a slower tier back into every faster tier.
  // Fire-and-forget: a failure must not surface as an unhandled rejection.
  const backfill = <T>(
    hitIndex: number,
    key: string,
    value: T,
    cacheTTL?: Milliseconds
  ): void => {
    Promise.all(
      caches.slice(0, hitIndex).map(cache => cache.set(key, value, cacheTTL))
    ).catch(() => {
      /* best-effort backfill */
    });
  };

  return {
    get,
    set,
    del: async key => {
      await Promise.all(caches.map(cache => cache.del(key)));
    },
    /**
     * Read-through over the whole hierarchy: on a full miss, `fn` is run
     * once and the result written to every tier.
     */
    async wrap<T>(
      key: string,
      fn: () => Promise<T>,
      ttl?: WrapTTL<T>
    ): Promise<T> {
      const [value, i] = await lookup<T>(key);
      if (value === undefined) {
        const result = await fn();
        const cacheTTL = typeof ttl === 'function' ? ttl(result) : ttl;
        await set<T>(key, result, cacheTTL);
        return result;
      }
      const cacheTTL = typeof ttl === 'function' ? ttl(value) : ttl;
      backfill(i, key, value, cacheTTL);
      // call wrap for store for internal refreshThreshold logic, see:
      // src/caching.ts caching.wrap — fire-and-forget, errors swallowed so
      // they cannot become unhandled rejections.
      caches[i].wrap(key, fn, ttl).catch(() => {
        /* best-effort refresh */
      });
      return value;
    },
    reset: async () => {
      await Promise.all(caches.map(x => x.reset()));
    },
    mget: async (...keys: string[]) => {
      const values = new Array(keys.length).fill(undefined);
      for (const cache of caches) {
        // Stop scanning once every key has been answered.
        if (values.every(x => x !== undefined)) break;
        try {
          const val = await cache.store.mget(...keys);
          val.forEach((v, i) => {
            if (values[i] === undefined && v !== undefined) values[i] = v;
          });
        } catch (e) {
          // ignore tier failure
        }
      }
      return values;
    },
    mset: async (args: [string, unknown][], ttl?: Milliseconds) => {
      await Promise.all(caches.map(cache => cache.store.mset(args, ttl)));
    },
    mdel: async (...keys: string[]) => {
      await Promise.all(caches.map(cache => cache.store.mdel(...keys)));
    },
    /**
     * Same as `wrap`, but for a function that takes arguments.
     */
    methodWrap: async <T>(
      key: string,
      fn: (...args) => Promise<T>,
      fnArgs: any[],
      ttl?: WrapTTL<T>
    ): Promise<T> => {
      const [value, i] = await lookup<T>(key);
      if (value === undefined) {
        const result = await fn(...fnArgs);
        const cacheTTL = typeof ttl === 'function' ? ttl(result) : ttl;
        await set(key, result, cacheTTL);
        return result;
      }
      const cacheTTL = typeof ttl === 'function' ? ttl(value) : ttl;
      backfill(i, key, value, cacheTTL);
      // call methodWrap for store for internal refreshThreshold logic,
      // see: src/caching.ts caching.wrap — fire-and-forget.
      caches[i].methodWrap(key, fn, fnArgs, ttl).catch(() => {
        /* best-effort refresh */
      });
      return value;
    },
  };
}
3 changes: 3 additions & 0 deletions packages/cache-manager/src/base/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
export * from './cacheManager';
export * from './store';
export * from './types';
97 changes: 97 additions & 0 deletions packages/cache-manager/src/base/prmoiseCoalesce.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
/**
 * fork from https://github.com/douglascayers/promise-coalesce/blob/main/src/coalesce.ts
 * reason: supports Node.js >= v12
*/
// Holds the resolve/reject pair of one parked caller so a later
// settlement can complete its promise.
interface PromiseCallback<T = any, E = Error> {
  resolve: ResolveFunction<T>;
  reject: RejectFunction<E>;
}

// Signature of a Promise executor's `resolve` callback.
type ResolveFunction<T = any> = (value: T | PromiseLike<T>) => void;

// Signature of a Promise executor's `reject` callback.
type RejectFunction<E = Error> = (reason: E) => void;

// One entry per in-flight key; the array holds the callbacks of every
// caller currently waiting for that key's running request to settle.
const callbacks = new Map<string, Array<PromiseCallback<any>>>();

/**
 * Enqueue a promise for the group identified by `key`.
 *
 * Only the first caller for a given key actually executes `fn`; any caller
 * that arrives while that run is in flight simply waits. When the in-flight
 * run settles, every waiter settles with the same outcome, so `fn` never
 * runs concurrently for the same key.
 * This function resolves or rejects according to the given function argument.
 */
export async function coalesceAsync<T>(
  /**
   * Any identifier to group requests together.
   */
  key: string,
  /**
   * The function to run.
   */
  fn: () => T | PromiseLike<T>
): Promise<T> {
  if (hasKey(key)) {
    // Another caller already owns this key: just wait for its outcome.
    return enqueue(key);
  }
  // We are the leader: run `fn` and broadcast the outcome to all waiters.
  addKey(key);
  try {
    const result = await Promise.resolve(fn());
    coalesce({ key, result });
    return result;
  } catch (error) {
    coalesce({ key, error });
    throw error;
  }
}

// True while a request for `key` is currently in flight.
function hasKey(key: string): boolean {
  return callbacks.has(key);
}

// Mark `key` as in flight, starting with an empty waiter list.
function addKey(key: string): void {
  callbacks.set(key, []);
}

// Clear the in-flight marker (and any waiter list) for `key`.
function removeKey(key: string): void {
  callbacks.delete(key);
}

// Append one waiter to the group for `key`. The `set` call matters when the
// group did not exist yet (getCallbacksByKey then returned a fresh array).
function addCallbackToKey<T>(key: string, callback: PromiseCallback<T>): void {
  const waiters = getCallbacksByKey<T>(key);
  waiters.push(callback);
  callbacks.set(key, waiters);
}

// Waiters currently registered for `key`; a fresh empty array when none.
function getCallbacksByKey<T>(key: string): Array<PromiseCallback<T>> {
  return callbacks.get(key) ?? [];
}

// Park a caller: its resolve/reject pair is stored until the leader's run
// settles and `coalesce` releases the whole group.
function enqueue<T>(key: string): Promise<T> {
  return new Promise<T>((resolve, reject) =>
    addCallbackToKey<T>(key, { resolve, reject })
  );
}

// Remove and return every waiter registered for `key`, clearing the
// in-flight marker so the next caller becomes a new leader.
function dequeue<T>(key: string): Array<PromiseCallback<T>> {
  const waiters = getCallbacksByKey<T>(key);
  removeKey(key);
  return waiters;
}

/**
 * Settle every waiter for `key` with the leader's outcome.
 *
 * Failure is detected by property PRESENCE (`'error' in options`) rather
 * than truthiness: the leader may have rejected with a falsy value
 * (e.g. `throw undefined`), and those waiters must still be rejected,
 * not resolved with `undefined` as the old `if (error)` check did.
 * Callers pass exactly one of `error` / `result`.
 */
function coalesce<T>(options: {
  key: string;
  error?: Error;
  result?: T;
}): void {
  const { key, error, result } = options;
  const rejected = 'error' in options;
  dequeue(key).forEach(callback => {
    if (rejected) {
      // Propagate the rejection reason as-is, even when it is falsy.
      callback.reject(error as Error);
    } else {
      callback.resolve(result);
    }
  });
}
Loading
Loading