Skip to content

Commit

Permalink
feat: added compression to resume data cache using deflate
Browse files Browse the repository at this point in the history
  • Loading branch information
wyattjoh committed Nov 26, 2024
1 parent 214e0d3 commit c0d74e8
Show file tree
Hide file tree
Showing 2 changed files with 84 additions and 44 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,28 +5,33 @@ import {
import { createPrerenderResumeDataCache } from './resume-data-cache'
import { streamFromString } from '../stream-utils/node-web-streams-helper'

// Builds a prerender resume data cache pre-populated with one resolved
// entry under the key 'key', for use as a shared test fixture.
function createCacheWithSingleEntry() {
  const populatedCache = createPrerenderResumeDataCache()

  const entry = Promise.resolve({
    value: streamFromString('value'),
    tags: [],
    stale: 0,
    timestamp: 0,
    expire: 0,
    revalidate: 0,
  })
  populatedCache.cache.set('key', entry)

  return populatedCache
}

describe('stringifyResumeDataCache', () => {
  // An empty cache must serialize to the sentinel string 'null' rather than
  // a compressed payload, so the parser can short-circuit on it.
  it('serializes an empty cache', async () => {
    const cache = createPrerenderResumeDataCache()
    expect(await stringifyResumeDataCache(cache)).toBe('null')
  })

  // A non-empty cache serializes to a deflate-compressed, base64-encoded
  // JSON payload; the inline snapshot pins the exact encoded output.
  it('serializes a cache with a single entry', async () => {
    const cache = createCacheWithSingleEntry()
    expect(await stringifyResumeDataCache(cache)).toMatchInlineSnapshot(
      `"eJwlibEKwzAMBf/lzR46C7r2D0qHkEHYSmNq02ApocHo30PS6bi7DrVvE1DHJBZnUPeAyHG+2kf2ExuXVUBI9aHp9bwjwPitoGEMUOMioFuA5SpqXJfL5Lfk9h9NNi45sZ3q7n4AKDonOw=="`
    )
  })
})
Expand All @@ -37,4 +42,14 @@ describe('parseResumeDataCache', () => {
createPrerenderResumeDataCache()
)
})

// Round-trip check: serialize a one-entry cache, parse it back, and verify
// the parsed stores have the expected sizes.
it('parses a cache with a single entry', async () => {
  const original = createCacheWithSingleEntry()
  const serialized = await stringifyResumeDataCache(original)

  const roundTripped = createRenderResumeDataCache(serialized)

  expect(roundTripped.fetch.size).toBe(0)
  expect(roundTripped.cache.size).toBe(1)
})
})
85 changes: 55 additions & 30 deletions packages/next/src/server/resume-data-cache/resume-data-cache.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { InvariantError } from '../../shared/lib/invariant-error'
import {
type UseCacheCacheStore,
type FetchCacheStore,
Expand Down Expand Up @@ -64,22 +65,32 @@ type ResumeStoreSerialized = {
/**
 * Serializes a resume data cache into a single transportable string.
 *
 * An empty cache (no fetch entries and no cache entries) serializes to the
 * sentinel string `'null'`. A non-empty cache is JSON-stringified, then
 * deflate-compressed and base64-encoded.
 *
 * @param resumeDataCache - the render or prerender cache to serialize
 * @returns the serialized cache, or `'null'` when both stores are empty
 * @throws InvariantError when invoked in the edge runtime, where
 *   `node:zlib` is unavailable
 */
export async function stringifyResumeDataCache(
  resumeDataCache: RenderResumeDataCache | PrerenderResumeDataCache
): Promise<string> {
  if (process.env.NEXT_RUNTIME === 'edge') {
    throw new InvariantError(
      '`stringifyResumeDataCache` should not be called in edge runtime.'
    )
  } else {
    // Nothing to serialize — emit the sentinel the parser short-circuits on.
    if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) {
      return 'null'
    }

    const json: ResumeStoreSerialized = {
      store: {
        fetch: Object.fromEntries(
          stringifyFetchCacheStore(resumeDataCache.fetch.entries())
        ),
        cache: Object.fromEntries(
          await stringifyUseCacheCacheStore(resumeDataCache.cache.entries())
        ),
      },
    }

    // Compress the JSON string using zlib. As the data we want to compress
    // is already in memory, we use the synchronous deflateSync function.
    const { deflateSync } = require('node:zlib') as typeof import('node:zlib')

    return deflateSync(JSON.stringify(json)).toString('base64')
  }
}

/**
Expand Down Expand Up @@ -114,24 +125,38 @@ export function createRenderResumeDataCache(
export function createRenderResumeDataCache(
prerenderResumeDataCacheOrPersistedCache: PrerenderResumeDataCache | string
): RenderResumeDataCache {
if (typeof prerenderResumeDataCacheOrPersistedCache !== 'string') {
// If the cache is already a prerender cache, we can return it directly,
// we're just performing a type change.
return prerenderResumeDataCacheOrPersistedCache
}
if (process.env.NEXT_RUNTIME === 'edge') {
throw new InvariantError(
'`createRenderResumeDataCache` should not be called in edge runtime.'
)
} else {
if (typeof prerenderResumeDataCacheOrPersistedCache !== 'string') {
// If the cache is already a prerender cache, we can return it directly,
// we're just performing a type change.
return prerenderResumeDataCacheOrPersistedCache
}

if (prerenderResumeDataCacheOrPersistedCache === 'null') {
return {
cache: new Map(),
fetch: new Map(),
if (prerenderResumeDataCacheOrPersistedCache === 'null') {
return {
cache: new Map(),
fetch: new Map(),
}
}
}

const json: ResumeStoreSerialized = JSON.parse(
prerenderResumeDataCacheOrPersistedCache
)
return {
cache: parseUseCacheCacheStore(Object.entries(json.store.cache)),
fetch: parseFetchCacheStore(Object.entries(json.store.fetch)),
// This should be a compressed string. Let's decompress it using zlib.
// As the data we already want to decompress is in memory, we use the
// synchronous inflateSync function.
const { inflateSync } = require('node:zlib') as typeof import('node:zlib')

const json: ResumeStoreSerialized = JSON.parse(
inflateSync(
Buffer.from(prerenderResumeDataCacheOrPersistedCache, 'base64')
).toString('utf-8')
)

return {
cache: parseUseCacheCacheStore(Object.entries(json.store.cache)),
fetch: parseFetchCacheStore(Object.entries(json.store.fetch)),
}
}
}

0 comments on commit c0d74e8

Please sign in to comment.