From 2ca2d9da4726f303e4d2fa75f163fee1d7b47aaa Mon Sep 17 00:00:00 2001 From: thisisnkc Date: Fri, 27 Mar 2026 22:43:20 +0530 Subject: [PATCH 1/2] fix: avoid crash when refilling L1 from L2 with serialize:false Closes #111 --- packages/bentocache/src/cache/cache.ts | 4 +- .../src/cache/cache_entry/cache_entry.ts | 7 +- packages/bentocache/src/cache/cache_stack.ts | 9 +++ .../src/cache/get_set/two_tier_handler.ts | 2 +- .../bentocache/tests/cache/two_tier.spec.ts | 69 +++++++++++++++++++ 5 files changed, 86 insertions(+), 5 deletions(-) diff --git a/packages/bentocache/src/cache/cache.ts b/packages/bentocache/src/cache/cache.ts index f391b3f..1dd6645 100644 --- a/packages/bentocache/src/cache/cache.ts +++ b/packages/bentocache/src/cache/cache.ts @@ -82,7 +82,7 @@ export class Cache implements CacheProvider { const isRemoteItemValid = await this.#stack.isEntryValid(remoteItem) if (isRemoteItemValid) { - this.#stack.l1?.set(key, remoteItem!.entry.serialize(), options) + this.#stack.l1?.set(key, this.#stack.prepareForL1(remoteItem!.entry), options) this.#stack.emit(cacheEvents.hit(key, remoteItem!.entry.getValue(), this.name)) this.#options.logger.logL2Hit({ cacheName: this.name, key, options }) message.hit = true @@ -92,7 +92,7 @@ export class Cache implements CacheProvider { } if (remoteItem && options.isGraceEnabled()) { - this.#stack.l1?.set(key, remoteItem.entry.serialize(), options) + this.#stack.l1?.set(key, this.#stack.prepareForL1(remoteItem.entry), options) this.#stack.emit(cacheEvents.hit(key, remoteItem.entry.serialize(), this.name, 'l2', true)) this.#options.logger.logL2Hit({ cacheName: this.name, key, options, graced: true }) message.hit = true diff --git a/packages/bentocache/src/cache/cache_entry/cache_entry.ts b/packages/bentocache/src/cache/cache_entry/cache_entry.ts index 9489750..2c9b9fe 100644 --- a/packages/bentocache/src/cache/cache_entry/cache_entry.ts +++ b/packages/bentocache/src/cache/cache_entry/cache_entry.ts @@ -78,14 +78,17 @@ export class 
CacheEntry { return this } - serialize() { - const raw = { + toRaw() { + return { value: this.#value, createdAt: this.#createdAt, logicalExpiration: this.#logicalExpiration, ...(this.#tags.length > 0 && { tags: this.#tags }), } + } + serialize() { + const raw = this.toRaw() if (this.#serializer) return this.#serializer.serialize(raw) return raw } diff --git a/packages/bentocache/src/cache/cache_stack.ts b/packages/bentocache/src/cache/cache_stack.ts index 04d9fd7..949709c 100644 --- a/packages/bentocache/src/cache/cache_stack.ts +++ b/packages/bentocache/src/cache/cache_stack.ts @@ -9,6 +9,7 @@ import { BaseDriver } from '../drivers/base_driver.js' import { RemoteCache } from './facades/remote_cache.js' import { cacheEvents } from '../events/cache_events.js' import { cacheOperation } from '../tracing_channels.js' +import type { CacheEntry } from './cache_entry/cache_entry.js' import type { GetSetHandler } from './get_set/get_set_handler.js' import type { BentoCacheOptions } from '../bento_cache_options.js' import type { GetCacheValueReturn } from '../types/internals/index.js' @@ -125,6 +126,14 @@ export class CacheStack extends BaseDriver { return this.emitter.emit(event.name, event.data) } + /** + * Prepare a cache entry for L1 storage, returning serialized + * or raw data based on the serializeL1 option + */ + prepareForL1(entry: CacheEntry) { + return this.options.serializeL1 ? 
entry.serialize() : entry.toRaw() + } + /** * Returns the full key with prefix applied */ diff --git a/packages/bentocache/src/cache/get_set/two_tier_handler.ts b/packages/bentocache/src/cache/get_set/two_tier_handler.ts index eb0f22c..5bf36d4 100644 --- a/packages/bentocache/src/cache/get_set/two_tier_handler.ts +++ b/packages/bentocache/src/cache/get_set/two_tier_handler.ts @@ -62,7 +62,7 @@ export class TwoTierHandler { message.tier = 'l2' } - this.stack.l1?.set(key, item.entry.serialize(), options) + this.stack.l1?.set(key, this.stack.prepareForL1(item.entry), options) this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name, 'l2')) return item.entry.getValue() diff --git a/packages/bentocache/tests/cache/two_tier.spec.ts b/packages/bentocache/tests/cache/two_tier.spec.ts index 462cd6f..e0eb3a9 100644 --- a/packages/bentocache/tests/cache/two_tier.spec.ts +++ b/packages/bentocache/tests/cache/two_tier.spec.ts @@ -10,6 +10,75 @@ import { CacheFactory } from '../../factories/cache_factory.js' import { L2CacheError, UndefinedValueError } from '../../src/errors.js' import { throwingFactory, slowFactory, REDIS_CREDENTIALS } from '../helpers/index.js' +test.group('Two tier cache | serialize: false on L1', () => { + test('L2 hit should correctly refill L1 when serialize is false', async ({ assert }) => { + const { cache, local, remote, stack } = new CacheFactory() + .withMemoryL1({ serialize: false }) + .withL1L2Config() + .create() + + await remote.set('foo', JSON.stringify({ value: { name: 'John' } }), stack.defaultOptions) + + const r1 = await cache.get({ key: 'foo' }) + assert.deepEqual(r1, { name: 'John' }) + + const r2 = await cache.get({ key: 'foo' }) + assert.deepEqual(r2, { name: 'John' }) + + const l1Entry = local.get('foo', stack.defaultOptions) + assert.deepEqual(l1Entry?.entry.getValue(), { name: 'John' }) + assert.isNotString(l1Entry?.entry.getValue()) + }) + + test('getOrSet L2 hit should correctly refill L1 when serialize is false', async 
({ assert }) => { + const { cache, remote, stack } = new CacheFactory() + .withMemoryL1({ serialize: false }) + .withL1L2Config() + .create() + + await remote.set('foo', JSON.stringify({ value: 'bar' }), stack.defaultOptions) + + const r1 = await cache.getOrSet({ + key: 'foo', + factory: throwingFactory('should not be called'), + }) + assert.deepEqual(r1, 'bar') + + const r2 = await cache.getOrSet({ + key: 'foo', + factory: throwingFactory('should not be called'), + }) + assert.deepEqual(r2, 'bar') + }) + + test('grace period backoff should correctly store in L1 when serialize is false', async ({ + assert, + }) => { + const { cache } = new CacheFactory() + .withMemoryL1({ serialize: false }) + .withL1L2Config() + .merge({ ttl: 100, grace: '10m', timeout: null }) + .create() + + const r1 = await cache.getOrSet({ key: 'key1', factory: () => ({ foo: 'bar' }) }) + + await sleep(100) + + const r2 = await cache.getOrSet({ + key: 'key1', + factory: () => { + throw new Error('factory error') + }, + }) + + assert.deepEqual(r1, { foo: 'bar' }) + assert.deepEqual(r2, { foo: 'bar' }) + + const r3 = await cache.get({ key: 'key1' }) + assert.deepEqual(r3, { foo: 'bar' }) + }) +}) + test.group('Two tier cache', () => { test('get() returns null if null is stored', async ({ assert }) => { const { cache } = new CacheFactory().withL1L2Config().create() From e499df6efa4a14d34d3391af6119c641239e4f26 Mon Sep 17 00:00:00 2001 From: thisisnkc Date: Fri, 27 Mar 2026 22:51:26 +0530 Subject: [PATCH 2/2] chore: add changeset --- .changeset/curly-laws-doubt.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/curly-laws-doubt.md diff --git a/.changeset/curly-laws-doubt.md b/.changeset/curly-laws-doubt.md new file mode 100644 index 0000000..3616cdb --- /dev/null +++ b/.changeset/curly-laws-doubt.md @@ -0,0 +1,5 @@ +--- +'bentocache': patch +--- + +Fixed crash when serialize is false in L1 with an L2 cache. L2 was storing serialized data in L1, causing a TypeError. 
Now, when `serialize: false` is configured on the L1 driver, L1 is refilled with the raw (unserialized) entry instead.