implemented cache

main
Inga 🏳‍🌈 1 year ago
parent e38fb2408a
commit adbb5edb4b
  1. src/storage/cache.spec.ts (144 lines changed)
  2. src/storage/cache.ts (41 lines changed)
  3. src/storage/inMemoryDB.ts (45 lines changed)
  4. src/storage/types.ts (11 lines changed)
  5. src/utils/serializer.ts (18 lines changed)
  6. src/utils/throttle.ts (6 lines changed)

@@ -0,0 +1,144 @@
import { sleep } from '../utils/eventLoop';
import { StringifiableValue } from '../utils/serializer';
import { CacheEntry, createCachedDataProvider } from './cache';
import { createKeyValueStorage } from './inMemoryDB';

type ValueType = Record<string, StringifiableValue>;

describe('createCachedDataProvider', () => {
  it('always fetches remote data when ttl is zero, for sequential calls', async () => {
    const calledWith: unknown[] = [];
    const getValue = createCachedDataProvider({
      cacheStorage: createKeyValueStorage<CacheEntry<ValueType>>(0),
      getNewValue: (key) => {
        calledWith.push(key);
        return Promise.resolve(key);
      },
      ttlMs: 0,
    });

    expect(await getValue({ key: 1 })).toEqual({ key: 1 });
    await sleep(10); // to make sure that Date.now() does not always return the same value
    expect(await getValue({ key: 2 })).toEqual({ key: 2 });
    await sleep(10);
    expect(await getValue({ key: 1 })).toEqual({ key: 1 });
    await sleep(10);
    expect(await getValue({ key: 1 })).toEqual({ key: 1 });
    await sleep(10);
    expect(await getValue({ key: 3 })).toEqual({ key: 3 });

    expect(calledWith).toEqual([
      { key: 1 },
      { key: 2 },
      { key: 1 },
      { key: 1 },
      { key: 3 },
    ]);
  });

  it('never refetches remote data when ttl is 1 hour, for sequential calls', async () => {
    const calledWith: unknown[] = [];
    const getValue = createCachedDataProvider({
      cacheStorage: createKeyValueStorage<CacheEntry<ValueType>>(0),
      getNewValue: (key) => {
        calledWith.push(key);
        return Promise.resolve(key);
      },
      ttlMs: 3_600_000,
    });

    expect(await getValue({ key: 1 })).toEqual({ key: 1 });
    await sleep(10);
    expect(await getValue({ key: 2 })).toEqual({ key: 2 });
    await sleep(10);
    expect(await getValue({ key: 1 })).toEqual({ key: 1 });
    await sleep(10);
    expect(await getValue({ key: 1 })).toEqual({ key: 1 });
    await sleep(10);
    expect(await getValue({ key: 3 })).toEqual({ key: 3 });

    expect(calledWith).toEqual([{ key: 1 }, { key: 2 }, { key: 3 }]);
  });

  it('refetches remote data when ttl is 1 hour but cache is reset', async () => {
    const cacheStorage = createKeyValueStorage<CacheEntry<ValueType>>(0);
    const calledWith: unknown[] = [];
    const getValue = createCachedDataProvider({
      cacheStorage,
      getNewValue: (key) => {
        calledWith.push(key);
        return Promise.resolve(key);
      },
      ttlMs: 3_600_000,
    });

    expect(await getValue({ key: 1 })).toEqual({ key: 1 });
    await cacheStorage.clear();
    expect(await getValue({ key: 1 })).toEqual({ key: 1 });

    expect(calledWith).toEqual([{ key: 1 }, { key: 1 }]);
  });

  it('refetches remote data after 200ms when ttl is 100ms', async () => {
    const calledWith: unknown[] = [];
    const getValue = createCachedDataProvider({
      cacheStorage: createKeyValueStorage<CacheEntry<ValueType>>(0),
      getNewValue: (key: ValueType) => {
        const callNumber = calledWith.length;
        calledWith.push(key);
        return Promise.resolve({ ...key, callNumber });
      },
      ttlMs: 100,
    });

    expect(await getValue({ key: 1 })).toEqual({ key: 1, callNumber: 0 });
    expect(await getValue({ key: 2 })).toEqual({ key: 2, callNumber: 1 });
    expect(await getValue({ key: 1 })).toEqual({ key: 1, callNumber: 0 });
    expect(await getValue({ key: 2 })).toEqual({ key: 2, callNumber: 1 });
    await sleep(200);
    expect(await getValue({ key: 1 })).toEqual({ key: 1, callNumber: 2 });
    expect(await getValue({ key: 2 })).toEqual({ key: 2, callNumber: 3 });
    expect(await getValue({ key: 1 })).toEqual({ key: 1, callNumber: 2 });
    expect(await getValue({ key: 2 })).toEqual({ key: 2, callNumber: 3 });

    expect(calledWith).toEqual([
      { key: 1 },
      { key: 2 },
      { key: 1 },
      { key: 2 },
    ]);
  });

  it('never refetches remote data even when ttl is zero, for concurrent calls', async () => {
    const calledWith: unknown[] = [];
    const getValue = createCachedDataProvider({
      // We need noticeable cache latency for deterministic behavior, so that it will always throttle
      // instead of switching between throttling and caching at random
      cacheStorage: createKeyValueStorage<CacheEntry<ValueType>>(10),
      getNewValue: (key: ValueType) => {
        const callNumber = calledWith.length;
        calledWith.push(key);
        return Promise.resolve({ ...key, callNumber });
      },
      ttlMs: 0,
    });

    expect(
      await Promise.all([
        getValue({ key: 1 }),
        getValue({ key: 2 }),
        getValue({ key: 1 }),
        getValue({ key: 1 }),
        getValue({ key: 3 }),
      ]),
    ).toEqual([
      { key: 1, callNumber: 0 },
      { key: 2, callNumber: 1 },
      { key: 1, callNumber: 0 },
      { key: 1, callNumber: 0 },
      { key: 3, callNumber: 2 },
    ]);

    expect(calledWith).toEqual([{ key: 1 }, { key: 2 }, { key: 3 }]);
  });
});

@@ -0,0 +1,41 @@
import { throttle } from '../utils/throttle';
import { StringifiableValue, createSerializer } from '../utils/serializer';
import { ClearableKeyValueStorage } from './types';

type DataProvider<TKey, TValue> = (key: TKey) => Promise<TValue>;

export type CacheEntry<TValue> = {
  expirationDate: number;
  cachedValue: TValue;
};

export const createCachedDataProvider = <
  TKey extends StringifiableValue,
  TValue,
>({
  cacheStorage,
  getNewValue,
  ttlMs,
}: {
  cacheStorage: ClearableKeyValueStorage<string, CacheEntry<TValue>>;
  getNewValue: DataProvider<TKey, TValue>;
  ttlMs: number;
}) => {
  const keySerializer = createSerializer<TKey>();

  const unsafeGet = async (key: TKey) => {
    const cacheEntry = await cacheStorage.get(keySerializer.stringify(key));
    if (cacheEntry.found && cacheEntry.value.expirationDate >= Date.now()) {
      return cacheEntry.value.cachedValue;
    }

    const newValue = await getNewValue(key);
    await cacheStorage.set(keySerializer.stringify(key), {
      expirationDate: Date.now() + ttlMs,
      cachedValue: newValue,
    });
    return newValue;
  };

  return throttle(unsafeGet);
};
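
For orientation, here is a minimal usage sketch of createCachedDataProvider wired to the in-memory storage from this commit. It is not part of the diff: fetchRemoteValue, the 5-minute TTL, and the zero simulated latency are made-up placeholders, and the relative imports assume the snippet sits next to src/storage/cache.ts.

import { StringifiableValue } from '../utils/serializer';
import { CacheEntry, createCachedDataProvider } from './cache';
import { createKeyValueStorage } from './inMemoryDB';

type ValueType = Record<string, StringifiableValue>;

// Hypothetical remote fetcher; in a real setup this would call an API or a database.
const fetchRemoteValue = (key: ValueType): Promise<ValueType> =>
  Promise.resolve({ ...key, fetchedAt: Date.now() });

const getValue = createCachedDataProvider({
  // zero simulated storage latency; raise it to exercise the throttling path
  cacheStorage: createKeyValueStorage<CacheEntry<ValueType>>(0),
  getNewValue: fetchRemoteValue,
  ttlMs: 300_000, // serve cached values for up to five minutes
});

// The first call invokes fetchRemoteValue; repeated calls with the same key within
// the TTL are answered from cacheStorage, and concurrent duplicates share one promise.
void getValue({ id: 1 });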

@@ -0,0 +1,45 @@
import { sleep } from '../utils/eventLoop';
import { StringifiableValue, createSerializer } from '../utils/serializer';
import { ClearableKeyValueStorage } from './types';

export const createKeyValueStorage = <TValue extends StringifiableValue>(
  latencyMs: number,
) => {
  const withSimulatedLatency = async <TResult>(
    f: () => Promise<TResult>,
  ): Promise<TResult> => {
    // TODO: do some CPU load here, to make performance similar to real DBs, for performance testing
    await sleep(latencyMs);
    // TODO: do some additional CPU load here
    return f();
  };

  const serializer = createSerializer<TValue>();
  const storage = new Map<string, string>();

  return {
    get: (key: string) =>
      withSimulatedLatency(() =>
        Promise.resolve(
          storage.has(key)
            ? {
                found: true as const,
                // `storage.get(key)` is guaranteed to return a value, because we just checked that it's there
                // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
                value: serializer.parse(storage.get(key)!),
              }
            : {
                found: false as const,
              },
        ),
      ),
    set: (key: string, value: TValue) =>
      withSimulatedLatency(() =>
        Promise.resolve(
          void storage.set(key, serializer.stringify(value)),
        ),
      ),
    clear: () =>
      // eslint-disable-next-line @typescript-eslint/no-confusing-void-expression
      withSimulatedLatency(() => Promise.resolve(storage.clear())),
  } as ClearableKeyValueStorage<string, TValue>;
};

@@ -0,0 +1,11 @@
export type KeyValueStorage<TKey, TValue> = {
  get(key: TKey): Promise<{ found: true; value: TValue } | { found: false }>;
  set(key: TKey, value: TValue): Promise<void>;
};

export type ClearableKeyValueStorage<TKey, TValue> = KeyValueStorage<
  TKey,
  TValue
> & {
  clear(): Promise<void>;
};
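
The found flag makes the get result a discriminated union, so callers can narrow it without null checks. A small illustrative sketch (getOrDefault is not part of the commit):

import { KeyValueStorage } from './types';

// Illustrative helper: read a value, falling back to a default on a cache miss.
export const getOrDefault = async (
  storage: KeyValueStorage<string, number>,
  key: string,
  fallback: number,
): Promise<number> => {
  const result = await storage.get(key);
  // `result.value` is only accessible after narrowing on `found: true`.
  return result.found ? result.value : fallback;
};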

@@ -0,0 +1,18 @@
// Type that survives being serialized to JSON and back
export type StringifiableValue =
  | string
  | number
  | boolean
  | null
  | undefined
  | StringifiableValue[]
  | {
      [key: string]: StringifiableValue;
    };

// To be used instead of JSON.stringify / JSON.parse everywhere, in order to ensure that values are of a stringifiable type
export const createSerializer = <TValue extends StringifiableValue>() =>
  JSON as {
    stringify(value: TValue): string;
    parse(serialized: string): TValue;
  };
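
A short usage sketch of the typed serializer; the Settings type is invented for illustration, and the import path assumes the snippet lives next to serializer.ts.

import { StringifiableValue, createSerializer } from './serializer';

type Settings = Record<string, StringifiableValue>;

const settingsSerializer = createSerializer<Settings>();

const stored = settingsSerializer.stringify({ theme: 'dark', retries: 3 });
const restored: Settings = settingsSerializer.parse(stored);
// Values containing functions or class instances are rejected at compile time
// instead of silently degrading when passed through JSON.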

@ -1,4 +1,5 @@
import { nextTick } from './eventLoop'; import { nextTick } from './eventLoop';
import { StringifiableValue, createSerializer } from './serializer';
/** /**
* Function like this probably already exists on npm, or maybe it can be constructed from lodash tools etc. * Function like this probably already exists on npm, or maybe it can be constructed from lodash tools etc.
@ -20,12 +21,13 @@ import { nextTick } from './eventLoop';
* @returns Wrapped function, such that if it is called with the same arguments as some of the previous calls * @returns Wrapped function, such that if it is called with the same arguments as some of the previous calls
* that did not yet resolve, it will return the previous promise rather than invoke `f` again. * that did not yet resolve, it will return the previous promise rather than invoke `f` again.
*/ */
export const throttle = <TArgs extends unknown[], TResult>( export const throttle = <TArgs extends StringifiableValue[], TResult>(
f: (...args: TArgs) => Promise<TResult>, f: (...args: TArgs) => Promise<TResult>,
) => { ) => {
const argsSerializer = createSerializer<TArgs>();
const promises = new Map<string, Promise<TResult>>(); const promises = new Map<string, Promise<TResult>>();
return (...args: TArgs) => { return (...args: TArgs) => {
const promiseKey = JSON.stringify(args); const promiseKey = argsSerializer.stringify(args);
if (!promises.has(promiseKey)) { if (!promises.has(promiseKey)) {
promises.set( promises.set(
promiseKey, promiseKey,
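
A small sketch of the behavior this change preserves while tightening the types: arguments are now keyed through the typed serializer, and identical concurrent calls reuse one in-flight promise. slowDouble is invented for illustration, and the import path assumes the snippet sits next to throttle.ts.

import { throttle } from './throttle';

// Hypothetical slow operation standing in for a network call.
const slowDouble = async (n: number): Promise<number> => {
  await new Promise((resolve) => setTimeout(resolve, 100));
  return n * 2;
};

const throttledDouble = throttle(slowDouble);

// Both calls with 21 share a single invocation of slowDouble while it is in flight;
// the call with 7 has a different serialized key and triggers its own invocation.
void Promise.all([throttledDouble(21), throttledDouble(21), throttledDouble(7)]);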
