-
Notifications
You must be signed in to change notification settings - Fork 78
[Edge Config] Support Next.js specific IO semantics #883
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Conversation
|
The latest updates on your projects. Learn more about Vercel for GitHub.
|
🦋 Changeset detected. Latest commit: f5850a6. The changes in this PR will be included in the next version bump. Not sure what this means? Click here to learn what changesets are. Click here if you're a maintainer who wants to add another changeset to this PR |
|
Review the following changes in direct dependencies. Learn more about Socket for GitHub.
|
9730629 to
c38598c
Compare
c38598c to
b496ff5
Compare
| function setCacheLifeFromFetchCache( | ||
| fetchCache: undefined | 'force-cache' | 'no-store', | ||
| ): void { | ||
| if (fetchCache === 'force-cache') { | ||
| cacheLife('default'); | ||
| } else { | ||
| cacheLife({ stale: 60, revalidate: 0, expire: 0 }); | ||
| } | ||
| } |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The setCacheLifeFromFetchCache function ignores the cache: 'no-store' option and enables caching anyway, which changes the behavior compared to the original index.ts and could surprise users.
View Details
📝 Patch Details
diff --git a/packages/edge-config/src/index.next-js.ts b/packages/edge-config/src/index.next-js.ts
index adea979..100db49 100644
--- a/packages/edge-config/src/index.next-js.ts
+++ b/packages/edge-config/src/index.next-js.ts
@@ -180,18 +180,32 @@ export const createClient = trace(
if (localOptions?.consistentRead) {
// fall through to fetching
} else if (shouldUseDevelopmentCache) {
- localEdgeConfig = await getInMemoryEdgeConfigForNext(
- connectionString,
- fetchCache,
- options.staleIfError,
- );
- } else {
- localEdgeConfig = await getLocalEdgeConfigForNext(
- connection.type,
- connection.id,
- fetchCache,
- );
- }
+ if (fetchCache === 'no-store') {
+ localEdgeConfig = await getInMemoryEdgeConfig(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ } else {
+ localEdgeConfig = await getInMemoryEdgeConfigForNext(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ }
+ } else if (fetchCache === 'no-store') {
+ localEdgeConfig = await getLocalEdgeConfig(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ } else {
+ localEdgeConfig = await getLocalEdgeConfigForNext(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ }
if (localEdgeConfig) {
if (isEmptyKey(key)) return undefined;
@@ -202,6 +216,16 @@ export const createClient = trace(
return Promise.resolve(localEdgeConfig.items[key] as T);
}
+ if (fetchCache === 'no-store') {
+ return fetchEdgeConfigItem<T>(
+ baseUrl,
+ key,
+ version,
+ localOptions?.consistentRead,
+ headers,
+ fetchCache,
+ );
+ }
return fetchEdgeConfigItemForNext<T>(
baseUrl,
key,
@@ -226,23 +250,47 @@ export const createClient = trace(
if (localOptions?.consistentRead) {
// fall through to fetching
} else if (shouldUseDevelopmentCache) {
- localEdgeConfig = await getInMemoryEdgeConfigForNext(
- connectionString,
- fetchCache,
- options.staleIfError,
- );
- } else {
- localEdgeConfig = await getLocalEdgeConfigForNext(
- connection.type,
- connection.id,
- fetchCache,
- );
- }
+ if (fetchCache === 'no-store') {
+ localEdgeConfig = await getInMemoryEdgeConfig(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ } else {
+ localEdgeConfig = await getInMemoryEdgeConfigForNext(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ }
+ } else if (fetchCache === 'no-store') {
+ localEdgeConfig = await getLocalEdgeConfig(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ } else {
+ localEdgeConfig = await getLocalEdgeConfigForNext(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ }
if (localEdgeConfig) {
return Promise.resolve(hasOwnProperty(localEdgeConfig.items, key));
}
+ if (fetchCache === 'no-store') {
+ return fetchEdgeConfigHas(
+ baseUrl,
+ key,
+ version,
+ localOptions?.consistentRead,
+ headers,
+ fetchCache,
+ );
+ }
return fetchEdgeConfigHasForNext(
baseUrl,
key,
@@ -268,18 +316,32 @@ export const createClient = trace(
if (localOptions?.consistentRead) {
// fall through to fetching
} else if (shouldUseDevelopmentCache) {
- localEdgeConfig = await getInMemoryEdgeConfigForNext(
- connectionString,
- fetchCache,
- options.staleIfError,
- );
- } else {
- localEdgeConfig = await getLocalEdgeConfigForNext(
- connection.type,
- connection.id,
- fetchCache,
- );
- }
+ if (fetchCache === 'no-store') {
+ localEdgeConfig = await getInMemoryEdgeConfig(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ } else {
+ localEdgeConfig = await getInMemoryEdgeConfigForNext(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ }
+ } else if (fetchCache === 'no-store') {
+ localEdgeConfig = await getLocalEdgeConfig(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ } else {
+ localEdgeConfig = await getLocalEdgeConfigForNext(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ }
if (localEdgeConfig) {
if (keys === undefined) {
@@ -289,6 +351,16 @@ export const createClient = trace(
return Promise.resolve(pick(localEdgeConfig.items, keys) as T);
}
+ if (fetchCache === 'no-store') {
+ return fetchAllEdgeConfigItem<T>(
+ baseUrl,
+ keys,
+ version,
+ localOptions?.consistentRead,
+ headers,
+ fetchCache,
+ );
+ }
return fetchAllEdgeConfigItemForNext<T>(
baseUrl,
keys,
@@ -309,23 +381,46 @@ export const createClient = trace(
if (localOptions?.consistentRead) {
// fall through to fetching
} else if (shouldUseDevelopmentCache) {
- localEdgeConfig = await getInMemoryEdgeConfigForNext(
- connectionString,
- fetchCache,
- options.staleIfError,
- );
- } else {
- localEdgeConfig = await getLocalEdgeConfigForNext(
- connection.type,
- connection.id,
- fetchCache,
- );
- }
+ if (fetchCache === 'no-store') {
+ localEdgeConfig = await getInMemoryEdgeConfig(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ } else {
+ localEdgeConfig = await getInMemoryEdgeConfigForNext(
+ connectionString,
+ fetchCache,
+ options.staleIfError,
+ );
+ }
+ } else if (fetchCache === 'no-store') {
+ localEdgeConfig = await getLocalEdgeConfig(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ } else {
+ localEdgeConfig = await getLocalEdgeConfigForNext(
+ connection.type,
+ connection.id,
+ fetchCache,
+ );
+ }
if (localEdgeConfig) {
return Promise.resolve(localEdgeConfig.digest);
}
+ if (fetchCache === 'no-store') {
+ return fetchEdgeConfigTrace(
+ baseUrl,
+ version,
+ localOptions?.consistentRead,
+ headers,
+ fetchCache,
+ );
+ }
return fetchEdgeConfigTraceForNext(
baseUrl,
version,
Analysis
setCacheLifeFromFetchCache ignores cache: 'no-store' option in Next.js wrapper
What fails: The Next.js wrapper functions in index.next-js.ts (getInMemoryEdgeConfigForNext, getLocalEdgeConfigForNext, fetchEdgeConfigItemForNext, fetchEdgeConfigHasForNext, fetchAllEdgeConfigItemForNext, fetchEdgeConfigTraceForNext) always enable function-level caching via the 'use cache' directive, even when cache: 'no-store' is explicitly specified by the user.
How to reproduce:
// Using the default cache option (no-store)
const client = createClient(process.env.EDGE_CONFIG);
const value1 = await client.get('key1');
const value2 = await client.get('key1'); // Returns cached value from first call
// Or explicitly specifying no-store
const client = createClient(process.env.EDGE_CONFIG, { cache: 'no-store' });
const value1 = await client.get('key1');
const value2 = await client.get('key1'); // Still returns cached value
Result: With cache: 'no-store', subsequent calls return the cached result from the 'use cache' directive instead of fetching fresh data. The function-level caching takes precedence over the user's explicit cache: 'no-store' option.
Expected: When cache: 'no-store' is specified, the data should be fetched fresh on every call, not cached at the function level. This matches the behavior of the non-Next.js wrapper in index.ts, which respects the cache: 'no-store' option by passing it directly to fetch calls.
Fix: Modified index.next-js.ts to conditionally call non-wrapped versions (without 'use cache') when cache: 'no-store' is specified. This respects the user's explicit opt-out of caching while still providing Next.js function-level caching optimizations for other cache modes.
Verification: All existing tests pass, confirming the fix doesn't break existing functionality.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
no, expire zero says "don't cache this actually" but the stale time is long enough that content prefetched with a edge-config read can be re-used in the browser for up to 60s without rechecking with the server. You can of course override this in the consumer so this is just a default
b496ff5 to
b19127f
Compare
This change refactors the edge-config implementation in preparation for supporting Next.js specific builds. The key goal is to prepare the function interfaces where we intend to push the "use cache" boundaries for Next.js support so they are stringly typed. This is to help with optimizing a fast path for "use cache" that can avoid some serialization. One concession in this refactor is we need to move away from a closure for the dev-only SWR edge config fetcher. The problem with this architecture is that the function created in the closure won't be accessible to the individual methods (like get) when they are behind a "use cache", because functions are not serializable. The technique now employed is a lazily created SWR deduper that is keyed off the connection string. This implementation does not consider garbage collection when clients are collected because 1) this is dev only and dev servers don't live very long, and 2) this library does not typically require creating arbitrarily many clients since connections are typically stable for the lifetime of a deployment. We could use a FinalizationRegistry, but that would limit support to runtimes that provide it and require some careful conditional requiring, which just adds bundling complexity.
b864cf4 to
7a03863
Compare
b19127f to
5920fe7
Compare
5920fe7 to
502dfec
Compare
502dfec to
9ab729d
Compare
Adds an export map for the `next-js` condition and a new `index.next-js.ts` entrypoint for TSUP. Converts the default test fixture to Next.js with cache components, which will use the new entrypoint. Renames the existing test fixtures to use the latest Next.js release without cache components enabled, which will continue to use the default entrypoint. To support cache components we use "use cache". However, rather than actually caching all edge config reads, we use cacheLife to determine whether the particular API call should be dynamic (zero expiration) or cached (default cache life). By default "use cache" will never reach out over the network, so this should add minimal overhead to all edge config reads. However, there is some serialization overhead. We will be updating Next.js to optimize cache reads that end up not caching the result (expiration zero) so that we can skip serialization and key generation. For now there may be a minor degradation in the fastest reads due to this additional overhead; however, it will only affect users who are using Cache Components with Next.js and won't have any impact on other users.
9ab729d to
6d6dcb4
Compare
* extract createCreateClient factory * avoid re-exporting types * fix import
Adds an export map for the
`next-js` condition and a new `index.next-js.ts` entrypoint for TSUP. Converts the default test fixture to Next.js with cache components, which will use the new entrypoint. Renames the existing test fixtures to use the latest Next.js release without cache components enabled, which will continue to use the default entrypoint. To support cache components we use "use cache". However, rather than actually caching all edge config reads, we use cacheLife to determine whether the particular API call should be dynamic (zero expiration) or cached (default cache life). By default "use cache" will never reach out over the network, so this should add minimal overhead to all edge config reads. However, there is some serialization overhead. We will be updating Next.js to optimize cache reads that end up not caching the result (expiration zero) so that we can skip serialization and key generation. For now there may be a minor degradation in the fastest reads due to this additional overhead; however, it will only affect users who are using Cache Components with Next.js and won't have any impact on other users.