Implement persistent data cache option and update cache key logic #920
Changeset:

@@ -0,0 +1,5 @@
+---
+"@opennextjs/aws": minor
+---
+
+Add an option to keep the data cache persistent between deployments
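For reference, the flag is read from globalThis.openNextConfig.dangerous?.persistentDataCache, so opting in would look roughly like the sketch below in open-next.config.ts. The file name and the default entry are assumptions for illustration; only dangerous.persistentDataCache comes from this PR:

    // open-next.config.ts (illustrative sketch, not part of this diff)
    const config = {
      default: {},
      dangerous: {
        // Keep data (fetch) cache entries across deployments instead of
        // scoping their keys to the current NEXT_BUILD_ID.
        persistentDataCache: true,
      },
    };

    export default config;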
Cache handler (class Cache):

@@ -3,7 +3,12 @@ import type {
   IncrementalCacheContext,
   IncrementalCacheValue,
 } from "types/cache";
-import { getTagsFromValue, hasBeenRevalidated, writeTags } from "utils/cache";
+import {
+  createCacheKey,
+  getTagsFromValue,
+  hasBeenRevalidated,
+  writeTags,
+} from "utils/cache";
 import { isBinaryContentType } from "../utils/binary";
 import { debug, error, warn } from "./logger";

@@ -31,7 +36,7 @@ function isFetchCache(
 // We need to use globalThis client here as this class can be defined at load time in next 12 but client is not available at load time
 export default class Cache {
   public async get(
-    key: string,
+    baseKey: string,
     // fetchCache is for next 13.5 and above, kindHint is for next 14 and above and boolean is for earlier versions
     options?:
       | boolean
@@ -49,22 +54,24 @@
     const softTags = typeof options === "object" ? options.softTags : [];
     const tags = typeof options === "object" ? options.tags : [];
-    return isFetchCache(options)
-      ? this.getFetchCache(key, softTags, tags)
-      : this.getIncrementalCache(key);
+    const isDataCache = isFetchCache(options);
+    return isDataCache
+      ? this.getFetchCache(baseKey, softTags, tags)
+      : this.getIncrementalCache(baseKey);
   }

-  async getFetchCache(key: string, softTags?: string[], tags?: string[]) {
-    debug("get fetch cache", { key, softTags, tags });
+  async getFetchCache(baseKey: string, softTags?: string[], tags?: string[]) {
+    debug("get fetch cache", { baseKey, softTags, tags });
     try {
+      const key = createCacheKey(baseKey, true);
       const cachedEntry = await globalThis.incrementalCache.get(key, "fetch");

       if (cachedEntry?.value === undefined) return null;

       const _tags = [...(tags ?? []), ...(softTags ?? [])];
       const _lastModified = cachedEntry.lastModified ?? Date.now();
       const _hasBeenRevalidated = await hasBeenRevalidated(
-        key,
+        baseKey,
         _tags,
         cachedEntry,
       );

Review comments on the hasBeenRevalidated(baseKey, ...) call: "Maybe we should add a comment explaining why the base key is used (inline + on the method)?" and, later, "Re-opening."
@@ -105,8 +112,11 @@
     }
   }

-  async getIncrementalCache(key: string): Promise<CacheHandlerValue | null> {
+  async getIncrementalCache(
+    baseKey: string,
+  ): Promise<CacheHandlerValue | null> {
     try {
+      const key = createCacheKey(baseKey, false);
       const cachedEntry = await globalThis.incrementalCache.get(key, "cache");

       if (!cachedEntry?.value) {

@@ -119,7 +129,7 @@
       const tags = getTagsFromValue(cacheData);
       const _lastModified = cachedEntry.lastModified ?? Date.now();
       const _hasBeenRevalidated = await hasBeenRevalidated(
-        key,
+        baseKey,
         tags,
         cachedEntry,
       );
@@ -191,20 +201,22 @@
   }

   async set(
-    key: string,
+    baseKey: string,
     data?: IncrementalCacheValue,
     ctx?: IncrementalCacheContext,
   ): Promise<void> {
     if (globalThis.openNextConfig.dangerous?.disableIncrementalCache) {
       return;
     }
+    const key = createCacheKey(baseKey, data?.kind === "FETCH");
     // This one might not even be necessary anymore
     // Better be safe than sorry
     const detachedPromise = globalThis.__openNextAls
       .getStore()
       ?.pendingPromiseRunner.withResolvers<void>();
     try {
       if (data === null || data === undefined) {
+        // only case where we delete the cache is for ISR/SSG cache
         await globalThis.incrementalCache.delete(key);
       } else {
         const revalidate = this.extractRevalidateForSet(ctx);

@@ -302,7 +314,7 @@
         }
       }

-      await this.updateTagsOnSet(key, data, ctx);
+      await this.updateTagsOnSet(baseKey, data, ctx);
       debug("Finished setting cache");
     } catch (e) {
       error("Failed to set cache", e);
Composable cache handler:

@@ -4,10 +4,33 @@ import { fromReadableStream, toReadableStream } from "utils/stream";
 import { debug } from "./logger";

 const pendingWritePromiseMap = new Map<string, Promise<ComposableCacheEntry>>();
+/**
+ * Get the cache key for a composable entry.
+ * Composable cache keys are a special case: they are a stringified tuple composed of a representation of the BUILD_ID and the actual key.
+ * @param key The composable cache key
+ * @returns The cache key to use (with the BUILD_ID part stripped when persistentDataCache is enabled)
+ */
+function getComposableCacheKey(key: string): string {
+  try {
+    const shouldPrependBuildId =
+      globalThis.openNextConfig.dangerous?.persistentDataCache !== true;
+    if (shouldPrependBuildId) {
+      return key;
+    }
+    const [_buildId, ...rest] = JSON.parse(key);
+    return JSON.stringify([...rest]);
+  } catch (e) {
+    debug("Error while parsing composable cache key", e);
+    // If we fail to parse the key, we just return it as is
+    // This is not ideal, but we don't want to crash the application
+    return key;
+  }
+}

 export default {
-  async get(cacheKey: string) {
+  async get(key: string) {
     try {
+      const cacheKey = getComposableCacheKey(key);
       // We first check if we have a pending write for this cache key
       // If we do, we return the pending promise instead of fetching the cache
       if (pendingWritePromiseMap.has(cacheKey)) {

Review thread on the getComposableCacheKey(key) call in get(): a contributor asked "Would it make sense to move that l45 and use ..." (comment since edited); the replies were "Here it doesn't really matter, it's in memory anyway." and "SGTM".
@@ -55,7 +78,8 @@
     }
   },

-  async set(cacheKey: string, pendingEntry: Promise<ComposableCacheEntry>) {
+  async set(key: string, pendingEntry: Promise<ComposableCacheEntry>) {
+    const cacheKey = getComposableCacheKey(key);
     pendingWritePromiseMap.set(cacheKey, pendingEntry);
     const entry = await pendingEntry.finally(() => {
       pendingWritePromiseMap.delete(cacheKey);
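For illustration, the key transformation described in the getComposableCacheKey docstring boils down to rewriting a JSON tuple. A rough sketch with made-up values (the real keys and their exact shape come from Next.js):

    // Hypothetical composable key: a stringified [buildId, actualKey] tuple.
    const incoming = JSON.stringify(["build-abc123", "my-composable-entry"]);

    // With dangerous.persistentDataCache enabled, the build ID element is dropped,
    // so the same logical entry keeps the same storage key across deployments:
    const [_buildId, ...rest] = JSON.parse(incoming);
    const stored = JSON.stringify([...rest]); // '["my-composable-entry"]'

    // With the option disabled (the default), the incoming key is used unchanged,
    // which implicitly scopes composable cache entries to the current build.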
utils/cache:

@@ -80,3 +80,19 @@ export async function writeTags(
   // Here we know that we have the correct type
   await globalThis.tagCache.writeTags(tagsToWrite as any);
 }
+
+export function createCacheKey(key: string, isDataCache: boolean): string {
+  // We always prepend the build ID to the cache key for ISR/SSG cache entries.
+  // For the data cache, we only prepend the build ID if persistentDataCache is not enabled.
+  const shouldPrependBuildId =
+    globalThis.openNextConfig.dangerous?.persistentDataCache !== true ||
+    !isDataCache;
+  if (shouldPrependBuildId) {
+    // If we don't have a build ID, we just return the key as is
+    if (!process.env.NEXT_BUILD_ID) {
+      return key;
+    }
+    return `${process.env.NEXT_BUILD_ID}/${key}`;
+  }
+  return key;
+}

Review comment on the two explanatory comments at the top of createCacheKey (lines +90 to +91): "prepend" does not sound right any more; it would be great to update the comments and move them to their corresponding cases.
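A quick sanity check of the resulting keys (hypothetical values; the unit test below covers the same cases):

    // Assuming process.env.NEXT_BUILD_ID === "abc123":
    createCacheKey("blog/post", false); // "abc123/blog/post": ISR/SSG entries are always build-scoped
    createCacheKey("fetch-entry", true); // "abc123/fetch-entry": data cache, persistentDataCache not enabled

    // With dangerous.persistentDataCache === true:
    createCacheKey("fetch-entry", true); // "fetch-entry": data cache entries survive deployments

    // With NEXT_BUILD_ID unset, the key is always returned unchanged.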
Unit tests for createCacheKey:

@@ -0,0 +1,62 @@
+import { createCacheKey } from "@opennextjs/aws/utils/cache.js";
+import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
+
+describe("createCacheKey", () => {
+  const originalEnv = process.env;
+  const originalGlobalThis = globalThis as any;
+
+  beforeEach(() => {
+    vi.resetModules();
+    process.env = { ...originalEnv };
+
+    // Mock globalThis.openNextConfig
+    if (!globalThis.openNextConfig) {
+      globalThis.openNextConfig = {
+        dangerous: {},
+      };
+    }
+  });
+
+  afterEach(() => {
+    process.env = originalEnv;
+    globalThis.openNextConfig = originalGlobalThis.openNextConfig;
+  });
+
+  test("prepends build ID for non-data cache entries", () => {
+    process.env.NEXT_BUILD_ID = "test-build-id";
+    const key = "test-key";
+
+    const result = createCacheKey(key, false);
+
+    expect(result).toBe("test-build-id/test-key");
+  });
+
+  test("prepends build ID for data cache when persistentDataCache is not enabled", () => {
+    process.env.NEXT_BUILD_ID = "test-build-id";
+    globalThis.openNextConfig.dangerous.persistentDataCache = false;
+    const key = "test-key";
+
+    const result = createCacheKey(key, true);
+
+    expect(result).toBe("test-build-id/test-key");
+  });
+
+  test("does not prepend build ID for data cache when persistentDataCache is enabled", () => {
+    process.env.NEXT_BUILD_ID = "test-build-id";
+    globalThis.openNextConfig.dangerous.persistentDataCache = true;
+    const key = "test-key";
+
+    const result = createCacheKey(key, true);
+
+    expect(result).toBe("test-key");
+  });
+
+  test("handles missing build ID", () => {
+    process.env.NEXT_BUILD_ID = undefined;
+    const key = "test-key";
+
+    const result = createCacheKey(key, false);
+
+    expect(result).toBe("test-key");
+  });
+});