Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/curly-laws-doubt.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'bentocache': patch
---

Fixed a crash that occurred when `serialize: false` was configured on the L1 cache alongside an L2 cache: the L2 layer refilled L1 with serialized data, causing a `TypeError` on the next read. L1 is now correctly refilled with raw objects.
4 changes: 2 additions & 2 deletions packages/bentocache/src/cache/cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ export class Cache implements CacheProvider {
const isRemoteItemValid = await this.#stack.isEntryValid(remoteItem)

if (isRemoteItemValid) {
this.#stack.l1?.set(key, remoteItem!.entry.serialize(), options)
this.#stack.l1?.set(key, this.#stack.prepareForL1(remoteItem!.entry), options)
this.#stack.emit(cacheEvents.hit(key, remoteItem!.entry.getValue(), this.name))
this.#options.logger.logL2Hit({ cacheName: this.name, key, options })
message.hit = true
Expand All @@ -92,7 +92,7 @@ export class Cache implements CacheProvider {
}

if (remoteItem && options.isGraceEnabled()) {
this.#stack.l1?.set(key, remoteItem.entry.serialize(), options)
this.#stack.l1?.set(key, this.#stack.prepareForL1(remoteItem.entry), options)
this.#stack.emit(cacheEvents.hit(key, remoteItem.entry.serialize(), this.name, 'l2', true))
this.#options.logger.logL2Hit({ cacheName: this.name, key, options, graced: true })
message.hit = true
Expand Down
7 changes: 5 additions & 2 deletions packages/bentocache/src/cache/cache_entry/cache_entry.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,14 +78,17 @@ export class CacheEntry {
return this
}

serialize() {
const raw = {
/**
 * Build the plain-object representation of this entry.
 *
 * The `tags` property is only present when the entry actually
 * carries tags, keeping the stored payload minimal.
 */
toRaw() {
  const hasTags = this.#tags.length > 0
  return {
    value: this.#value,
    createdAt: this.#createdAt,
    logicalExpiration: this.#logicalExpiration,
    ...(hasTags ? { tags: this.#tags } : {}),
  }
}

/**
 * Serialize the entry for storage. When a serializer is configured
 * it is applied to the raw representation; otherwise the raw
 * plain object is returned as-is.
 */
serialize() {
  const raw = this.toRaw()
  return this.#serializer ? this.#serializer.serialize(raw) : raw
}
Expand Down
9 changes: 9 additions & 0 deletions packages/bentocache/src/cache/cache_stack.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import { BaseDriver } from '../drivers/base_driver.js'
import { RemoteCache } from './facades/remote_cache.js'
import { cacheEvents } from '../events/cache_events.js'
import { cacheOperation } from '../tracing_channels.js'
import type { CacheEntry } from './cache_entry/cache_entry.js'
import type { GetSetHandler } from './get_set/get_set_handler.js'
import type { BentoCacheOptions } from '../bento_cache_options.js'
import type { GetCacheValueReturn } from '../types/internals/index.js'
Expand Down Expand Up @@ -125,6 +126,14 @@ export class CacheStack extends BaseDriver {
return this.emitter.emit(event.name, event.data)
}

/**
 * Convert a cache entry into the shape expected by the L1 store.
 *
 * Honors the `serializeL1` option: a serialized payload when true,
 * the raw object representation when false.
 */
prepareForL1(entry: CacheEntry) {
  if (this.options.serializeL1) return entry.serialize()
  return entry.toRaw()
}

/**
* Returns the full key with prefix applied
*/
Expand Down
2 changes: 1 addition & 1 deletion packages/bentocache/src/cache/get_set/two_tier_handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ export class TwoTierHandler {
message.tier = 'l2'
}

this.stack.l1?.set(key, item.entry.serialize(), options)
this.stack.l1?.set(key, this.stack.prepareForL1(item.entry), options)

this.#emit(cacheEvents.hit(key, item.entry.getValue(), this.stack.name, 'l2'))
return item.entry.getValue()
Expand Down
69 changes: 69 additions & 0 deletions packages/bentocache/tests/cache/two_tier.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,75 @@ import { CacheFactory } from '../../factories/cache_factory.js'
import { L2CacheError, UndefinedValueError } from '../../src/errors.js'
import { throwingFactory, slowFactory, REDIS_CREDENTIALS } from '../helpers/index.js'

test.group('Two tier cache | serialize: false on L1', () => {
  test('L2 hit should correctly refill L1 when serialize is false', async ({ assert }) => {
    const { cache, local, remote, stack } = new CacheFactory()
      .withMemoryL1({ serialize: false })
      .withL1L2Config()
      .create()

    // Seed only L2, with the JSON payload a remote driver would hold
    await remote.set('foo', JSON.stringify({ value: { name: 'John' } }), stack.defaultOptions)

    // First read comes from L2 and must refill L1
    const firstRead = await cache.get({ key: 'foo' })
    assert.deepEqual(firstRead, { name: 'John' })

    // Second read is served from the refilled L1
    const secondRead = await cache.get({ key: 'foo' })
    assert.deepEqual(secondRead, { name: 'John' })

    // L1 must hold the raw object, not a serialized string
    const l1Entry = local.get('foo', stack.defaultOptions)
    assert.deepEqual(l1Entry?.entry.getValue(), { name: 'John' })
    assert.isNotString(l1Entry?.entry.getValue())
  })

  test('getOrSet L2 hit should correctly refill L1 when serialize is false', async ({ assert }) => {
    const { cache, remote, stack } = new CacheFactory()
      .withMemoryL1({ serialize: false })
      .withL1L2Config()
      .create()

    // Seed only L2; the factory must never run since L2 already has the key
    await remote.set('foo', JSON.stringify({ value: 'bar' }), stack.defaultOptions)

    const firstResult = await cache.getOrSet({
      key: 'foo',
      factory: throwingFactory('should not be called'),
    })
    assert.deepEqual(firstResult, 'bar')

    // Second call hits the refilled L1 — factory still untouched
    const secondResult = await cache.getOrSet({
      key: 'foo',
      factory: throwingFactory('should not be called'),
    })
    assert.deepEqual(secondResult, 'bar')
  })

  test('grace period backoff should correctly store in L1 when serialize is false', async ({
    assert,
  }) => {
    const { cache } = new CacheFactory()
      .withMemoryL1({ serialize: false })
      .withL1L2Config()
      .merge({ ttl: 100, grace: '10m', timeout: null })
      .create()

    // Populate the cache, then let the logical TTL expire
    const initialValue = await cache.getOrSet({ key: 'key1', factory: () => ({ foo: 'bar' }) })

    await sleep(100)

    // Factory fails: the graced value must be served instead
    const gracedValue = await cache.getOrSet({
      key: 'key1',
      factory: () => {
        throw new Error('factory error')
      },
    })

    assert.deepEqual(initialValue, { foo: 'bar' })
    assert.deepEqual(gracedValue, { foo: 'bar' })

    // The graced entry must also have been stored back in L1 correctly
    const followUpRead = await cache.get({ key: 'key1' })
    assert.deepEqual(followUpRead, { foo: 'bar' })
  })
})

test.group('Two tier cache', () => {
test('get() returns null if null is stored', async ({ assert }) => {
const { cache } = new CacheFactory().withL1L2Config().create()
Expand Down
Loading