Skip to content

Commit

Permalink
feat: cache valid policy #189
Browse files Browse the repository at this point in the history
  • Loading branch information
Soontao committed Jan 8, 2021
1 parent d7dc33a commit 5b824e1
Show file tree
Hide file tree
Showing 4 changed files with 194 additions and 23 deletions.
6 changes: 4 additions & 2 deletions .github/workflows/deno.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@ name: Deno CI

on:
push:
branches: '*'
branches:
- '*'
pull_request:
branches: '*'
branches:
- '*'
schedule:
- cron: '0 2 * * *'

Expand Down
6 changes: 4 additions & 2 deletions .github/workflows/nodejs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@ name: Github CI

on:
push:
branches: '*'
branches:
- '*'
pull_request:
branches: '*'
branches:
- '*'
# daily check at 01:00 AM
schedule:
- cron: '0 1 * * *'
Expand Down
132 changes: 115 additions & 17 deletions src/cacheProvider.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,29 @@
import { LRUMap } from './functional/LRUMap';
import { GeneralFunction } from './types';

/**
 * Controls which producer outcomes a CacheProvider stores.
 * All flags default to false (see DEFAULT_CACHE_POLICY).
 */
export interface CachePolicy {
// when true, cache producer results that are `undefined`
cacheUndefined?: boolean;
// when true, cache producer results that are `null`
cacheNull?: boolean;
// when true, cache a thrown error and rethrow it on later hits for the same key
cacheThrow?: boolean;
}

/**
 * Combined cache-provider configuration: an optional CachePolicy plus
 * provider-specific parameters (T is e.g. LRUCacheProviderParam or
 * TTLCacheProviderParam).
 */
export interface CacheConfig<T> {
policy?: CachePolicy;
params?: T;
}


/**
 * Fallback policy: do not cache null/undefined results or thrown errors.
 *
 * NOTE(review): LRUCacheProvider's constructor calls
 * Object.assign(DEFAULT_CACHE_POLICY, ...), which mutates this shared
 * constant — one instance's policy can leak into later instances; verify.
 */
const DEFAULT_CACHE_POLICY: CachePolicy = {
cacheUndefined: false,
cacheNull: false,
cacheThrow: false
};

/**
 * Internal marker wrapper for a cached failure.
 *
 * When `CachePolicy.cacheThrow` is enabled, the error thrown by a producer
 * is wrapped in this class before being stored, so that a later cache hit
 * can recognize the entry and rethrow the original error instead of
 * returning it as a value.
 */
class CachedThrowError {

  private readonly _error: Error;

  constructor(error: Error) {
    this._error = error;
  }

  /** The original error thrown by the producer. */
  getError(): Error {
    return this._error;
  }

}

export interface CacheProvider<K, V> extends Map<K, V> {

Expand All @@ -10,7 +33,7 @@ export interface CacheProvider<K, V> extends Map<K, V> {
* @param key
* @param producer
*/
getOrCreate(key: K, producer: GeneralFunction<any[], V>): V
getOrCreate<R>(key: K, producer: GeneralFunction<any[], R>): R

}

Expand All @@ -19,6 +42,10 @@ export interface CacheProvider<K, V> extends Map<K, V> {
*/
export interface AsyncCacheProvider<K, V> extends CacheProvider<K, Promise<V>> { }

/** Construction parameters for LRUCacheProvider. */
interface LRUCacheProviderParam {
/** maximum number of entries held before least-recently-used eviction */
maxEntry: number;
}

/**
* LRU Cache Provider
*
Expand All @@ -27,25 +54,83 @@ export interface AsyncCacheProvider<K, V> extends CacheProvider<K, Promise<V>> {
*/
export class LRUCacheProvider<K = any, V = any> extends LRUMap implements CacheProvider<K, V> {

public getOrCreate(key: K, producer: GeneralFunction<[], V>): V {
protected readonly _cachePolicy: CachePolicy = DEFAULT_CACHE_POLICY;

constructor(config: CacheConfig<LRUCacheProviderParam>)
constructor(maxEntry?: number)
constructor(param0: any) {
super(typeof param0 === 'number' ? param0 : (param0?.params?.maxEntry ?? 10240));
if (typeof param0 === 'object') {
this._cachePolicy = Object.assign(DEFAULT_CACHE_POLICY, param0.policy ?? {});
}
}


public getOrCreate<R>(key: K, producer: GeneralFunction<any[], R>): R {
if (!this.has(key)) {
const value = producer();
// work with async function
if (value instanceof Promise) {
// @ts-ignore
return value.then((result) => {
this.set(key, result);
return result;
});
try {
const value = producer();
// work with async function
if (value instanceof Promise) {
// @ts-ignore
return value
.then((result) => {
if (
result === null && !Boolean(this._cachePolicy.cacheNull) ||
result === undefined && !Boolean(this._cachePolicy.cacheUndefined)
) {
// do nothing
} else {
this.set(key, result);
}
return result;
})
.catch((error) => {
if (Boolean(this._cachePolicy.cacheThrow)) {
this.set(key, new CachedThrowError(error));
}
throw error;
});
}
if (
value === null && !Boolean(this._cachePolicy.cacheNull) ||
value === undefined && !Boolean(this._cachePolicy.cacheUndefined)
) {
// do nothing
} else {
this.set(key, value);
}
return value;
} catch (error) {
if (Boolean(this._cachePolicy.cacheThrow)) {
this.set(key, new CachedThrowError(error));
}
throw error;
}
this.set(key, value);

}
return this.get(key);
const cachedValue = this.get(key);
if (cachedValue instanceof CachedThrowError) {
throw cachedValue.getError();
}
return cachedValue;
}


}

/** Construction parameters for TTLCacheProvider. */
interface TTLCacheProviderParam {
/** entry time-to-live in milliseconds */
ttl?: number;
/** interval in milliseconds between expiry sweeps */
checkInterval?: number;
/** maximum number of entries in the underlying LRU store */
maxEntry?: number;
}

/**
 * Fallback TTL provider parameters: 30 s ttl, 60 s check interval,
 * 10240 max entries.
 *
 * NOTE(review): TTLCacheProvider's constructor Object.assign()s into
 * objects derived from this shared constant — verify it is not mutated
 * across instances.
 */
const DEFAULT_CACHE_PROVIDER_PARAM: TTLCacheProviderParam = {
ttl: 30 * 1000,
checkInterval: 60 * 1000,
maxEntry: 10240
};

/**
* TTL Cache Provider
*
Expand All @@ -55,11 +140,24 @@ export class LRUCacheProvider<K = any, V = any> extends LRUMap implements CacheP
*/
export class TTLCacheProvider<K = any, V = any> extends LRUCacheProvider<K, V> {

/**
 * @param config cache configuration (policy + ttl/checkInterval/maxEntry),
 *               or positional ttl / checkInterval / maxEntry numbers
 */
constructor(config: CacheConfig<TTLCacheProviderParam>)
constructor(ttl?: number, checkInterval?: number, maxEntry?: number)
constructor(...params: any[]) {
  // BUG FIX: in the positional form, maxEntry is the THIRD argument — the
  // original forwarded params[0] (the ttl) to the LRU base as maxEntry.
  // The config-object form is forwarded as-is (the base class reads
  // `policy` and `params.maxEntry` from it).
  super(typeof params[0] === 'number' ? (params[2] ?? 10240) : params[0]);
  // work on a fresh copy so the shared DEFAULT_CACHE_PROVIDER_PARAM
  // constant is never mutated across instances
  const effective: TTLCacheProviderParam = { ...DEFAULT_CACHE_PROVIDER_PARAM };
  if (typeof params[0] === 'number') {
    effective.ttl = params[0] || effective.ttl;
    effective.checkInterval = params[1] || effective.checkInterval;
    effective.maxEntry = params[2] || effective.maxEntry;
  } else if (params[0] !== undefined && params[0] !== null) {
    // BUG FIX: merge the caller's `params` into the PARAM defaults — the
    // original merged into the policy object, so ttl/checkInterval
    // defaults were lost and this.ttl became undefined. The undefined
    // guard also fixes `new TTLCacheProvider()` crashing on params[0].
    // (The policy itself is applied by the base-class constructor above.)
    Object.assign(effective, params[0].params || {});
  }
  this.ttl = effective.ttl;
  this.checkInterval = effective.checkInterval;
  this.timeoutStorage = new LRUCacheProvider(effective.maxEntry);
}

private timestamp() {
Expand Down
73 changes: 71 additions & 2 deletions test/cacheProvider.test.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { platform } from 'os';
import { sleep } from '../src';
import { TTLCacheProvider } from '../src/cacheProvider';
import { LRUCacheProvider, TTLCacheProvider } from '../src/cacheProvider';

let describe2 = describe;
if (platform() !== 'linux') {
Expand Down Expand Up @@ -31,7 +31,27 @@ describe2('cacheProviders', () => {

});

it('should process cache corret on error', async () => {
// Exercises the new config-object constructor form:
// entries must expire after `ttl` ms, and clear() must stop the sweep timer.
it('should support TTL CacheProvider (new)', async () => {

const ttlCache = new TTLCacheProvider({ params: { ttl: 500, checkInterval: 1000 } });

ttlCache.set('v', 1);
expect(ttlCache.has('v')).toBeTruthy();
// just past the 500 ms ttl — entry must be gone
await sleep(510);
expect(ttlCache.has('v')).toBeFalsy();
expect(ttlCache.set('v2', '123'));
// 100 ms in: still within ttl
await sleep(100);
expect(ttlCache.has('v2')).toBeTruthy();
// 510 ms total: past ttl
await sleep(410);
expect(ttlCache.has('v2')).toBeFalsy();
ttlCache.clear();
// clear() should also cancel the internal check-interval timer
expect(ttlCache['timer']).toBeUndefined();
expect(ttlCache.size).toBe(0);


});

it('should process cache correct on error', async () => {
const ttlCache = new TTLCacheProvider(500, 1000);

await expect(() => ttlCache.getOrCreate('v1', async () => { throw new Error('e1'); })).rejects.toThrowError('e1');
Expand All @@ -45,4 +65,53 @@ describe2('cacheProviders', () => {

});

// Verifies CachePolicy behavior:
// default policy never stores null/undefined/throws (producer re-runs each
// time); an all-true policy stores each outcome once and replays it.
it('should support cache policy', async () => {

const cache = new LRUCacheProvider();
let count = 0;
// each runner bumps `count` so we can observe how often the producer runs
const runner2 = async () => { count++; return null; };
const runner3 = async () => { count++; return undefined; };
const runner4 = async () => { count++; throw new Error(); };


// default policy: null is NOT cached, so the producer runs on every call
await cache.getOrCreate('f2', runner2);
await cache.getOrCreate('f2', runner2);
expect(count).toBe(2);
expect(cache.size).toBe(0);

// default policy: undefined is NOT cached either
await cache.getOrCreate('f3', runner3);
await cache.getOrCreate('f3', runner3);
expect(count).toBe(4);
expect(cache.size).toBe(0);

// default policy: thrown errors are NOT cached — producer reruns and rethrows
await expect(async () => cache.getOrCreate('f4', runner4)).rejects.toThrow();
await expect(async () => cache.getOrCreate('f4', runner4)).rejects.toThrow();
expect(count).toBe(6);
expect(cache.size).toBe(0);

cache.clear();
count = 0;

// opt-in policy: null/undefined/throw are each cached after the first call
const cache2 = new LRUCacheProvider({ policy: { cacheNull: true, cacheUndefined: true, cacheThrow: true } });

await cache2.getOrCreate('f2', runner2);
expect(await cache2.getOrCreate('f2', runner2)).toBeNull();
expect(count).toBe(1);
expect(cache2.size).toBe(1);

await cache2.getOrCreate('f3', runner3);
expect(await cache2.getOrCreate('f3', runner3)).toBeUndefined();
expect(count).toBe(2);
expect(cache2.size).toBe(2);

// cached throw: second call rethrows WITHOUT re-running the producer
await expect(async () => cache2.getOrCreate('f4', runner4)).rejects.toThrow();
await expect(async () => cache2.getOrCreate('f4', runner4)).rejects.toThrow();
expect(count).toBe(3);
expect(cache2.size).toBe(3);

cache2.clear();


});

});

0 comments on commit 5b824e1

Please sign in to comment.